Example #1
 def mark_for_export(self, source):
     """        
     Adds the given source to the list of datasets that need to be
     exported. Returns filename that the exported Dataset will
     have.
     """
     if not isinstance(source, Dataset):
         logger.error("Invalid source given. Not a Dataset.")
         return None
     
     # The format of self.exports is:
     #  key: source object (=> id)
     #  value: [filename, dataset_change_counter, dataset object]
     if source not in self.exports:
         logger.debug("Marking %s for export" % source)
         filename = utils.as_filename(source.key)
         new_export = [filename, -1, source]
         self.exports[source] = new_export
         return new_export[0]
     else:
         return self.exports[source][0]
Example #2
 def export_datasets(self):
     # Export Datasets to temporary directory, so that
     # gnuplot can access them.
     exporter = exporter_registry['ASCII']()
     
     destdir = self.tmpdir
     for (source, value) in self.exports.iteritems():
         (filename, change_counter, ds) = value
         if ds is None:
             logger.warn("One of the Datasets to export is None.")
             continue
         if ds.is_empty():
             logger.warn("One of the Datasets to export is empty")
             continue
     logger.debug("Change counter %d, old %d" % (ds.change_counter, change_counter))
         if ds.has_changes(change_counter):                              
             filename = os.path.join(destdir, utils.as_filename(filename))
             logger.debug('exporting "%s" to dir "%s"' % (ds, destdir))            
             exporter.write_to_file(filename, ds.data)
             self.exports[source][1] = ds.change_counter
         else:
             logger.info("Dataset has not changed and is not exported!")                           
Example #3
def new_dataset(spj, element):
    ncols = int(element.attrib.pop('ncols',0))
    typecodes = element.attrib.pop('typecodes','')

    # TODO: pass fileformat_version to importer (somehow)
    # TODO: how?
    fileformat = element.attrib.pop('fileformat', 'CSV')
    fileformat_version = element.attrib.pop('fileformat_version', None)
    ds = Dataset(**element.attrib)

    # metadata
    for eMetaitem in element.findall('Metadata/Metaitem'):
        key = eMetaitem.attrib['key']
        value = eMetaitem.text
        ds.metadata[key] = unicode(value)

    # actual Table
    if element.tag == 'Table':

        # Extract additional column information.
        # This information will be passed on to 'set_table_import',
        # which will pass it on to the internal importer.        
        column_props = [dict() for i in range(ncols)]
        
        for eColumn in element.findall('Column'):
            n = int(eColumn.get('n'))
            p = column_props[n]
            for eInfo in eColumn.findall('Info'):
                key = eInfo.get('key', None)
                if key is not None:
                    p[key] = unicode(eInfo.text)
        
        filename = os.path.join('datasets', utils.as_filename(ds.key))
        ds.set_table_import(spj, filename, typecodes, column_props, fileformat)
        
    
    return ds
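
For reference, new_dataset expects an element shaped roughly like the one below: ncols, typecodes and fileformat as attributes, Metadata/Metaitem children for the metadata, and per-column Info entries when the element is a Table. The element and attribute names are taken from the code above; the concrete values and the 'key' attribute are guesses used only to make this sketch parse with the standard xml.etree.ElementTree module.

from xml.etree.ElementTree import fromstring

xml = """
<Table key="measurement" ncols="2" typecodes="dd" fileformat="CSV">
  <Metadata>
    <Metaitem key="author">unknown</Metaitem>
  </Metadata>
  <Column n="0">
    <Info key="label">time</Info>
  </Column>
  <Column n="1">
    <Info key="label">voltage</Info>
  </Column>
</Table>
"""

element = fromstring(xml)
print(element.attrib)
for eMetaitem in element.findall('Metadata/Metaitem'):
    print(eMetaitem.get('key'), eMetaitem.text)
for eColumn in element.findall('Column'):
    print(eColumn.get('n'), [eInfo.text for eInfo in eColumn.findall('Info')])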
Example #4
    def __call__(self, ds):
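        """
        Extract the data file belonging to self.key from the project
        archive and import it into the given Dataset 'ds'.
        """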

        filename = os.path.join('datasets', utils.as_filename(self.key))
    
        try:
            archive = tarfile.open(self.projectname, 'r:gz')
        except tarfile.ReadError:
            logger.error('Error while opening archive "%s"' % self.projectname)
            raise FileNotFoundError

        if self.destdir is None:
            tempdir = tempfile.mkdtemp(prefix="spj-temp-")
        else:
            tempdir = self.destdir
            
        try:
            archive.extract(filename, tempdir)
            importer = globals.importer_registry['ASCII'](dataset=ds)
            return importer.read_dataset_from_file(os.path.join(tempdir, filename))
        finally:
            if self.destdir is None:
                shutil.rmtree(tempdir)
            archive.close()
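
Stripped of the project-specific importer, the pattern in __call__ above is: open the gzipped archive, extract exactly one member into a throwaway directory, read it, and remove that directory again in a finally block. Here is a self-contained sketch using only the standard library; the archive and member names are invented for the demo, which also builds the archive it then reads.

import os
import shutil
import tarfile
import tempfile

# Build a tiny gzipped archive to work with (stand-in for a project file).
workdir = tempfile.mkdtemp(prefix="spj-demo-")
member = 'datasets/measurement'
os.mkdir(os.path.join(workdir, 'datasets'))
with open(os.path.join(workdir, 'datasets', 'measurement'), 'w') as fd:
    fd.write("1 2\n3 4\n")
archivename = os.path.join(workdir, 'demo.spj')
archive = tarfile.open(archivename, 'w:gz')
archive.add(os.path.join(workdir, 'datasets'), 'datasets')
archive.close()

# Extract just the one member into a fresh temporary directory and
# clean up afterwards, as __call__ does.
archive = tarfile.open(archivename, 'r:gz')
tempdir = tempfile.mkdtemp(prefix="spj-temp-")
try:
    archive.extract(member, tempdir)
    with open(os.path.join(tempdir, member)) as fd:
        print(fd.read())
finally:
    shutil.rmtree(tempdir)
    archive.close()
    shutil.rmtree(workdir)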
Example #5
def save_project(spj, filename=None, path=None):
    """
    Write the whole project to a file.  Return True on success.
    
    The archive that is created is a gzipped tar file containing
    the XML file with the project info and additionally the data
    files containing the information from the current Dataset
    objects.
    """

    #
    # write project XML file
    #

    tempdir = tempfile.mkdtemp(prefix="spj-export-")
    filename = filename or spj.filename
    if filename is None:
        raise RuntimeError("No valid filename specified.")                                              

    try:
        projectfile = os.path.join(tempdir, 'project.xml')
        e = toElement(spj)

        fd = open(projectfile, 'w')
        fd.write('<?xml version="1.0" encoding="utf-8"?>\n')
        ElementTree(e).write(fd, encoding="utf-8")
        fd.close()

        #
        # now add all extra information to the tempdir
        # (Datasets and other files)
        #

        # add Dataset files to tempdir
        exporter_ascii = globals.exporter_registry['CSV']()
        
        dsdir = os.path.join(tempdir, 'datasets')
        os.mkdir(dsdir)
        for ds in spj.datasets:
            try:
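                # retrieving the array may raise error.NoData if the Dataset is empty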
                ds.get_array()
                dspath = os.path.join(dsdir, utils.as_filename(ds.key))
                exporter_ascii.write_to_file(dspath, ds)                
            except AttributeError:
                logger.error("Error while writing Dataset '%s'" % ds.key)
                raise
            except error.NoData:
                logger.error("Warning, empty Dataset -- no data file written.")

        #
        # create tar archive from tempdir
        #
        archive = None
        try:
            try:
                if path is not None:
                    filename = os.path.join(path, os.path.basename(filename))
                logger.info("Writing archive '%s'" % filename)
                archive = tarfile.open(filename, mode="w:gz")
                archive.add(tempdir, '')
            except IOError, (nr, msg):
                raise error.SloppyError('Error while creating archive "%s": %s' % (filename, msg))
        finally:
            if archive is not None:
                archive.close()
    finally:
        logger.debug("Removing directory %s" % tempdir)
        shutil.rmtree(tempdir)
        
    logger.debug("Finished writing '%s'" % filename)
    return True
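
Reduced to its skeleton, save_project stages everything in a temporary directory, packs that directory into a gzipped tar file, and removes the staging directory whether or not packing succeeded. The following self-contained sketch shows that flow with placeholder file contents; the real function writes the project XML via toElement and exports each Dataset instead.

import os
import shutil
import tarfile
import tempfile

def save_demo(archivename):
    """Stage placeholder files in a temporary directory and pack them
    into a gzipped tar archive, mirroring the structure of save_project."""
    tempdir = tempfile.mkdtemp(prefix="spj-export-")
    try:
        # placeholder for project.xml
        with open(os.path.join(tempdir, 'project.xml'), 'w') as fd:
            fd.write('<?xml version="1.0" encoding="utf-8"?>\n<Project/>\n')

        # placeholder for one exported Dataset under datasets/
        dsdir = os.path.join(tempdir, 'datasets')
        os.mkdir(dsdir)
        with open(os.path.join(dsdir, 'measurement'), 'w') as fd:
            fd.write("1 2\n3 4\n")

        # pack the staging directory; adding each entry with a relative
        # arcname keeps the member names relative, as in save_project
        archive = tarfile.open(archivename, mode='w:gz')
        try:
            for entry in os.listdir(tempdir):
                archive.add(os.path.join(tempdir, entry), entry)
        finally:
            archive.close()
    finally:
        shutil.rmtree(tempdir)
    return True

save_demo('demo-project.spj')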