Example #1
    def test_system_combinations(self):
        _to_stream = [
            True,
            False
        ]
        _to_file = [
            os.path.join(env.DIR_OUTPUT, 'test_ocgis_log.log'),
            None
        ]

        _level = [logging.INFO, logging.DEBUG, logging.WARN]
        for ii, (to_file, to_stream, level) in enumerate(itertools.product(_to_file, _to_stream, _level)):
            ocgis_lh.configure(to_file=to_file, to_stream=to_stream, level=level)
            try:
                ocgis_lh(ii)
                ocgis_lh('a test message')
                subset = ocgis_lh.get_logger('subset')
                interp = ocgis_lh.get_logger('interp')
                ocgis_lh('a subset message', logger=subset)
                ocgis_lh('an interp message', logger=interp)
                ocgis_lh('a general message', alias='foo', ugid=10)
                ocgis_lh('another message', level=level)
                if to_file is not None:
                    self.assertTrue(os.path.exists(to_file))
                    os.remove(to_file)
            finally:
                logging.shutdown()
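
The test above drives the full ocgis_lh lifecycle: configure handlers, log through the root handler and through named child loggers, then shut logging down. For orientation, a minimal standalone sketch of that lifecycle follows; the import locations (ocgis.env and ocgis.util.logging_ocgis.ocgis_lh) are assumptions about the OpenClimateGIS package layout and may differ between versions.

# Minimal sketch of the ocgis_lh lifecycle exercised by the test above.
# Assumption: ocgis_lh is importable from ocgis.util.logging_ocgis and env from ocgis.
import logging
import os

from ocgis import env
from ocgis.util.logging_ocgis import ocgis_lh

log_path = os.path.join(env.DIR_OUTPUT, 'test_ocgis_log.log')
# Route messages to a file, skip console streaming, and set the logging level.
ocgis_lh.configure(to_file=log_path, to_stream=False, level=logging.INFO)
try:
    # Log through the root handler and through a named child logger.
    ocgis_lh('a test message')
    subset = ocgis_lh.get_logger('subset')
    ocgis_lh('a subset message', logger=subset)
finally:
    # Release file handles so the log file can be inspected or removed.
    ocgis_lh.shutdown()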
Example #2
    def __init__(self,ops,serial=True,nprocs=1):
        self.ops = ops
        self.serial = serial
        self.nprocs = nprocs
        
        self._subset_log = ocgis_lh.get_logger('subset')

        ## create the calculation engine
        if self.ops.calc is None:
            self.cengine = None
        else:
            ocgis_lh('initializing calculation engine',self._subset_log,level=logging.DEBUG)
            self.cengine = OcgCalculationEngine(self.ops.calc_grouping,
                                           self.ops.calc,
                                           raw=self.ops.calc_raw,
                                           agg=self.ops.aggregate,
                                           calc_sample_size=self.ops.calc_sample_size)
            
        ## in the case of netcdf output, geometries must be unioned. this is
        ## also true for the case of the selection geometry being requested as
        ## aggregated.
        if (self.ops.output_format == 'nc' or self.ops.agg_selection is True) \
         and self.ops.geom is not None:
            ocgis_lh('aggregating selection geometry',self._subset_log)
            build = True
            for element_geom in self.ops.geom:
                if build:
                    new_geom = element_geom['geom']
                    new_crs = element_geom['crs']
                    new_properties = {'UGID':1}
                    build = False
                else:
                    new_geom = new_geom.union(element_geom['geom'])
            itr = [{'geom':new_geom,'properties':new_properties,'crs':new_crs}]
            self.ops.geom = itr
Example #3
    def test_system_simple(self):
        to_file = os.path.join(env.DIR_OUTPUT, 'test_ocgis_log.log')
        to_stream = False

        ocgis_lh.configure(to_file, to_stream)

        ocgis_lh('a test message')
        subset = ocgis_lh.get_logger('subset')
        subset.info('a subset message')
Example #4
    def test_simple(self):
        to_file = os.path.join(env.DIR_OUTPUT, "test_ocgis_log.log")
        to_stream = False

        ocgis_lh.configure(to_file, to_stream)

        ocgis_lh("a test message")
        subset = ocgis_lh.get_logger("subset")
        subset.info("a subset message")
Example #5
    def __init__(self, ops, serial=True, nprocs=1, validate=True):
        self.ops = ops
        self.serial = serial
        self.nprocs = nprocs

        subset_log = ocgis_lh.get_logger('subset')

        if validate:
            ocgis_lh('validating request datasets',
                     subset_log,
                     level=logging.DEBUG)
            ops.dataset.validate(ops=ops)

        ## create the calculation engine
        if self.ops.calc is None:
            self.cengine = None
        else:
            ocgis_lh('initializing calculation engine',
                     subset_log,
                     level=logging.DEBUG)
            self.cengine = OcgCalculationEngine(self.ops.calc_grouping,
                                                self.ops.calc,
                                                raw=self.ops.calc_raw,
                                                agg=self.ops.aggregate)

        ## check for a snippet request in the operations dictionary. if there is
        ## one, the time range should be set in the operations dictionary.
        if self.ops.snippet is True:
            ##TODO: move snippet to iteration
            ocgis_lh('getting snippet bounds', subset_log)
            for rd in self.ops.dataset:
                ## snippet is not implemented for time regions
                if rd.time_region is not None:
                    exc = NotImplementedError(
                        'snippet is not implemented for time regions')
                    ocgis_lh(exc=exc, logger=subset_log)

                rd.level_range = [1, 1]
                ods = rd.ds
                ## load the first time slice if there is no calculation or the
                ## calculation does not use a temporal group.
                if self.cengine is None or (self.cengine is not None
                                            and self.cengine.grouping is None):
                    ##TODO: improve slicing to not load all time values in a more
                    ## elegant way.
                    ods._load_slice.update({'T': slice(0, 1)})
                ## snippet for the computation. this currently requires loading
                ## all the data from the time dimension into memory.
                ##TODO: more efficiently pull dates for monthly grouping (for
                ##example).
                else:
                    ods.temporal.set_grouping(self.cengine.grouping)
                    tgdim = ods.temporal.group
                    times = ods.temporal.value[tgdim.dgroups[0]]
                    rd.time_range = list(
                        ods.temporal.get_datetime([times.min(),
                                                   times.max()]))
Example #6
 def __init__(self,colls,outdir,prefix,ops=None,add_meta=True):
     self.colls = colls
     self.ops = ops
     self.prefix = prefix
     self.outdir = outdir
     self.add_meta = add_meta
     self._log = ocgis_lh.get_logger('conv')
     
     if self._ext is None:
         self.path = self.outdir
     else:
         self.path = os.path.join(self.outdir,prefix+'.'+self._ext)
     ocgis_lh('converter initialized',level=logging.DEBUG,logger=self._log)
Example #7
    def __init__(self,ops,serial=True,nprocs=1,validate=True):
        self.ops = ops
        self.serial = serial
        self.nprocs = nprocs
        
        subset_log = ocgis_lh.get_logger('subset')
        
        if validate:
            ocgis_lh('validating request datasets',subset_log,level=logging.DEBUG)
            ops.dataset.validate(ops=ops)

        ## create the calculation engine
        if self.ops.calc is None:
            self.cengine = None
        else:
            ocgis_lh('initializing calculation engine',subset_log,level=logging.DEBUG)
            self.cengine = OcgCalculationEngine(self.ops.calc_grouping,
                                           self.ops.calc,
                                           raw=self.ops.calc_raw,
                                           agg=self.ops.aggregate)
            
        ## check for a snippet request in the operations dictionary. if there is
        ## one, the time range should be set in the operations dictionary.
        if self.ops.snippet is True:
            ##TODO: move snippet to iteration
            ocgis_lh('getting snippet bounds',subset_log)
            for rd in self.ops.dataset:
                ## snippet is not implemented for time regions
                if rd.time_region is not None:
                    exc = NotImplementedError('snippet is not implemented for time regions')
                    ocgis_lh(exc=exc,logger=subset_log)
                
                rd.level_range = [1,1]
                ods = rd.ds
                ## load the first time slice if there is no calculation or the
                ## calculation does not use a temporal group.
                if self.cengine is None or (self.cengine is not None and self.cengine.grouping is None):
                    ##TODO: improve slicing to not load all time values in a more
                    ## elegant way.
                    ods._load_slice.update({'T':slice(0,1)})
                ## snippet for the computation. this currently requires loading
                ## all the data from the time dimension into memory.
                ##TODO: more efficiently pull dates for monthly grouping (for
                ##example).
                else:
                    ods.temporal.set_grouping(self.cengine.grouping)
                    tgdim = ods.temporal.group
                    times = ods.temporal.value[tgdim.dgroups[0]]
                    rd.time_range = list(ods.temporal.get_datetime([times.min(),times.max()]))
Example #8
    def __init__(self, prefix=None, outdir=None, overwrite=False):
        self.outdir = outdir
        self.overwrite = overwrite
        self.prefix = prefix

        if self._ext is None:
            self.path = self.outdir
        else:
            self.path = os.path.join(self.outdir, prefix + "." + self._ext)
            if os.path.exists(self.path):
                if not self.overwrite:
                    msg = messages.M3.format(self.path)
                    raise IOError(msg)

        self._log = ocgis_lh.get_logger("conv")
Example #9
 def _iter_proc_args_(self):
     ''':rtype: tuple'''
     
     subset_log = ocgis_lh.get_logger('subset')
     ## if there is no geometry, yield None.
     if self.ops.geom is None:
         ocgis_lh('returning entire spatial domain - no selection geometry',subset_log)
         yield(self,None,subset_log)
         
     ## iterate through the geometries in the ShpDataset
     elif isinstance(self.ops.geom,ShpDataset):
         ocgis_lh('{0} geometry(s) to process'.format(len(self.ops.geom)),subset_log)
         for geom in self.ops.geom:
             yield(self,geom,subset_log)
             
     ## otherwise, the data is likely a GeometryDataset with a single value.
     ## just return it.
     else:
         ocgis_lh('1 geometry to process',subset_log)
         yield(self,self.ops.geom,subset_log)
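
_iter_proc_args_ above yields one (self, geom, subset_log) tuple per selection geometry, or a single tuple carrying None when no selection geometry is set. The self-contained sketch below shows the same yield-per-geometry pattern with plain stand-ins; the names are hypothetical and not taken from the OpenClimateGIS source.

# Hypothetical stand-in for the pattern used by _iter_proc_args_: yield one
# (geometry, logger) pair per selection geometry, or (None, logger) when no
# selection geometry is supplied.
import logging

def iter_proc_args(geoms, logger_name='subset'):
    log = logging.getLogger(logger_name)
    if geoms is None:
        # No selection geometry: a single pass over the full spatial domain.
        yield None, log
    else:
        for geom in geoms:
            yield geom, log

for geom, log in iter_proc_args(['geom-a', 'geom-b']):
    log.info('processing selection geometry: {0}'.format(geom))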
Example #10
    def __init__(self, prefix=None, outdir=None, overwrite=False, options=None):
        self.outdir = outdir
        self.overwrite = overwrite
        self.prefix = prefix

        if options is None:
            self.options = {}
        else:
            self.options = options

        if self._ext is None:
            self.path = self.outdir
        else:
            self.path = os.path.join(self.outdir, prefix + '.' + self._ext)
            if os.path.exists(self.path):
                if not self.overwrite:
                    msg = messages.M3.format(self.path)
                    raise IOError(msg)

        self._log = ocgis_lh.get_logger('conv')
Example #11
    def __init__(self, ops, request_base_size_only=False, progress=None):
        self.ops = ops
        self._request_base_size_only = request_base_size_only
        self._subset_log = ocgis_lh.get_logger('subset')
        self._progress = progress or ProgressOcgOperations()
        self._original_subcomm = deepcopy(vm.current_comm_name)
        self._backtransform = {}

        # Create the calculation engine if calculations are present.
        if self.ops.calc is None or self._request_base_size_only:
            self.cengine = None
            self._has_multivariate_calculations = False
        else:
            ocgis_lh('initializing calculation engine', self._subset_log, level=logging.DEBUG)
            self.cengine = CalculationEngine(self.ops.calc_grouping,
                                             self.ops.calc,
                                             calc_sample_size=self.ops.calc_sample_size,
                                             progress=self._progress,
                                             spatial_aggregation=self.ops.aggregate)
            self._has_multivariate_calculations = self.cengine.has_multivariate_functions
Example #12
 def __init__(self,colls,outdir,prefix,ops=None,add_meta=True,add_auxiliary_files=True,
              overwrite=False):
     self.colls = colls
     self.ops = ops
     self.prefix = prefix
     self.outdir = outdir
     self.add_meta = add_meta
     self.add_auxiliary_files = add_auxiliary_files
     self.overwrite = overwrite
     self._log = ocgis_lh.get_logger('conv')
     
     if self._ext is None:
         self.path = self.outdir
     else:
         self.path = os.path.join(self.outdir,prefix+'.'+self._ext)
         if os.path.exists(self.path):
             if not self.overwrite:
                 msg = 'Output path exists "{0}" and must be removed before proceeding. Set "overwrite" argument or env.OVERWRITE to True to overwrite.'.format(self.path)
                 ocgis_lh(logger=self._log,exc=IOError(msg))
         
     ocgis_lh('converter initialized',level=logging.DEBUG,logger=self._log)
Example #13
    def __init__(self,
                 colls,
                 outdir,
                 prefix,
                 mode='raw',
                 ops=None,
                 add_meta=True):
        self.colls = colls
        self.ops = ops
        self.prefix = prefix
        self.outdir = outdir
        self.mode = mode
        self.add_meta = add_meta
        self._log = ocgis_lh.get_logger('conv')

        if self._ext is None:
            self.path = self.outdir
        else:
            self.path = os.path.join(self.outdir, prefix + '.' + self._ext)
        ocgis_lh('converter initialized',
                 level=logging.DEBUG,
                 logger=self._log)
Example #14
    def __init__(self,ops,request_base_size_only=False,progress=None):
        self.ops = ops
        self._request_base_size_only = request_base_size_only
        self._subset_log = ocgis_lh.get_logger('subset')
        self._progress = progress or ProgressOcgOperations()

        ## create the calculation engine
        if self.ops.calc is None or self._request_base_size_only:
            self.cengine = None
            self._has_multivariate_calculations = False
        else:
            ocgis_lh('initializing calculation engine',self._subset_log,level=logging.DEBUG)
            self.cengine = OcgCalculationEngine(self.ops.calc_grouping,
                                           self.ops.calc,
                                           raw=self.ops.calc_raw,
                                           agg=self.ops.aggregate,
                                           calc_sample_size=self.ops.calc_sample_size,
                                           progress=self._progress)
            self._has_multivariate_calculations = any([self.cengine._check_calculation_members_(self.cengine.funcs,k) \
             for k in [AbstractMultivariateFunction,MultivariateEvalFunction]])
            
        ## in the case of netcdf output, geometries must be unioned. this is
        ## also true for the case of the selection geometry being requested as
        ## aggregated.
        if (self.ops.output_format == 'nc' or self.ops.agg_selection is True) \
         and self.ops.geom is not None:
            ocgis_lh('aggregating selection geometry',self._subset_log)
            build = True
            for element_geom in self.ops.geom:
                if build:
                    new_geom = element_geom['geom']
                    new_crs = element_geom['crs']
                    new_properties = {'UGID':1}
                    build = False
                else:
                    new_geom = new_geom.union(element_geom['geom'])
            itr = [{'geom':new_geom,'properties':new_properties,'crs':new_crs}]
            self.ops.geom = itr
Example #15
    def _iter_proc_args_(self):
        ''':rtype: tuple'''

        subset_log = ocgis_lh.get_logger('subset')
        ## if there is no geometry, yield None.
        if self.ops.geom is None:
            ocgis_lh('returning entire spatial domain - no selection geometry',
                     subset_log)
            yield (self, None, subset_log)

        ## iterate through the geometries in the ShpDataset
        elif isinstance(self.ops.geom, ShpDataset):
            ocgis_lh('{0} geometry(s) to process'.format(len(self.ops.geom)),
                     subset_log)
            for geom in self.ops.geom:
                yield (self, geom, subset_log)

        ## otherwise, the data is likely a GeometryDataset with a single value.
        ## just return it.
        else:
            ocgis_lh('1 geometry to process', subset_log)
            yield (self, self.ops.geom, subset_log)
Example #16
    def __init__(self, ops, request_base_size_only=False, progress=None):
        self.ops = ops
        self._request_base_size_only = request_base_size_only
        self._subset_log = ocgis_lh.get_logger('subset')
        self._progress = progress or ProgressOcgOperations()
        self._original_subcomm = deepcopy(vm.current_comm_name)
        self._backtransform = {}

        # Create the calculation engine if calculations are present.
        if self.ops.calc is None or self._request_base_size_only:
            self.cengine = None
            self._has_multivariate_calculations = False
        else:
            ocgis_lh('initializing calculation engine',
                     self._subset_log,
                     level=logging.DEBUG)
            self.cengine = CalculationEngine(
                self.ops.calc_grouping,
                self.ops.calc,
                calc_sample_size=self.ops.calc_sample_size,
                progress=self._progress,
                spatial_aggregation=self.ops.aggregate)
            self._has_multivariate_calculations = self.cengine.has_multivariate_functions
Example #17
    def execute(self):
        ## check for a user-supplied output prefix
        prefix = self.ops.prefix
            
        ## do directory management.
        if self.ops.output_format == 'numpy':
            outdir = None
        else:
            outdir = os.path.join(self.ops.dir_output,prefix)
            if os.path.exists(outdir):
                if env.OVERWRITE:
                    shutil.rmtree(outdir)
                else:
                    raise(IOError('The output directory exists but env.OVERWRITE is False: {0}'.format(outdir)))
            os.mkdir(outdir)
            
        try:
            ## configure logging ###################################################
            
            ## if file logging is enabled, perform some logic based on the operational
            ## parameters.
            if env.ENABLE_FILE_LOGGING:
                if self.ops.output_format == 'numpy':
                    to_file = None
                else:
                    to_file = os.path.join(outdir,prefix+'.log')
            else:
                to_file = None
            
            ## flags to determine streaming to console
            if env.VERBOSE:
                to_stream = True
            else:
                to_stream = False
    
            ## configure the logger
            if env.DEBUG:
                level = logging.DEBUG
            else:
                level = logging.INFO
            ocgis_lh.configure(to_file=to_file,to_stream=to_stream,level=level)
            
            ## create local logger
            interpreter_log = ocgis_lh.get_logger('interpreter')
            
            ocgis_lh('executing: {0}'.format(self.ops.prefix),interpreter_log)
            
            ## set up environment ##############################################
                
            self.check() ## run validation - doesn't do much now
                
            ## do not perform vector wrapping for NetCDF output
            if self.ops.output_format == 'nc':
                ocgis_lh('"vector_wrap" set to False for netCDF output',
                         interpreter_log,level=logging.WARN)
                self.ops.vector_wrap = False
    
            ## if the requested output format is "meta" then no operations are run
            ## and only the operations dictionary is required to generate output.
            if self.ops.output_format == 'meta':
                ret = MetaConverter(self.ops).write()
            ## this is the standard request for other output types.
            else:
                ## the operations object performs subsetting and calculations
                ocgis_lh('initializing subset',interpreter_log,level=logging.DEBUG)
                so = SubsetOperation(self.ops,serial=env.SERIAL,nprocs=env.CORES)
                ## if there is no grouping on the output files, a single converter
                ## is needed
                if self.ops.output_grouping is None:
                    Conv = OcgConverter.get_converter(self.ops.output_format)
                    ocgis_lh('initializing converter',interpreter_log,
                             level=logging.DEBUG)
                    conv = Conv(so,outdir,prefix,ops=self.ops)
                    ocgis_lh('starting converter write loop: {0}'.format(self.ops.output_format),interpreter_log,
                             level=logging.DEBUG)
                    ret = conv.write()
                else:
                    raise(NotImplementedError)
            
            ocgis_lh('execution complete: {0}'.format(self.ops.prefix),interpreter_log)

            return(ret)
        finally:
            ## shut down logging
            ocgis_lh.shutdown()
Example #18
    def execute(self):
        # check for a user-supplied output prefix
        prefix = self.ops.prefix

        # do directory management #

        # flag to indicate a directory is made. mostly a precaution to make sure the appropriate directory is
        # removed.
        made_output_directory = False

        if self.ops.output_format in self._no_directory:
            # no output directory for numpy output
            outdir = None
        else:
            # directories or a single output file(s) is created for the other cases
            if self.ops.add_auxiliary_files:
                # auxiliary files require that a directory be created
                outdir = os.path.join(self.ops.dir_output, prefix)
                if os.path.exists(outdir):
                    if env.OVERWRITE:
                        shutil.rmtree(outdir)
                    else:
                        raise IOError('The output directory exists but env.OVERWRITE is False: {0}'.format(outdir))
                os.mkdir(outdir)
                # on an exception, the output directory needs to be removed
                made_output_directory = True
            else:
                # with no auxiliary files the output directory will do just fine
                outdir = self.ops.dir_output

        try:
            # configure logging ########################################################################################

            progress = self._get_progress_and_configure_logging_(outdir, prefix)

            # create local logger
            interpreter_log = ocgis_lh.get_logger('interpreter')

            ocgis_lh('Initializing...', interpreter_log)

            # set up environment #######################################################################################

            # run validation - doesn't do much now
            self.check()

            # do not perform vector wrapping for NetCDF output
            if self.ops.output_format == 'nc':
                ocgis_lh('"vector_wrap" set to False for netCDF output',
                         interpreter_log, level=logging.WARN)
                self.ops.vector_wrap = False

            # if the requested output format is "meta" then no operations are run and only the operations dictionary is
            # required to generate output.
            Converter = self.ops._get_object_(OutputFormat.name).get_converter_class()
            if issubclass(Converter, AbstractMetaConverter):
                ret = Converter(self.ops).write()
            # this is the standard request for other output types.
            else:
                # the operations object performs subsetting and calculations
                ocgis_lh('initializing subset', interpreter_log, level=logging.DEBUG)
                so = SubsetOperation(self.ops, progress=progress)
                # if there is no grouping on the output files, a single converter is needed
                if self.ops.output_grouping is None:
                    ocgis_lh('initializing converter', interpreter_log, level=logging.DEBUG)
                    conv = self._get_converter_(Converter, outdir, prefix, so)
                    ocgis_lh('starting converter write loop: {0}'.format(self.ops.output_format), interpreter_log,
                             level=logging.DEBUG)
                    ret = conv.write()
                else:
                    raise NotImplementedError

            ocgis_lh('Operations successful.', interpreter_log)

            return ret
        except:
            # The output directory needs to be removed if one was created. Shutdown logging before to make sure there
            # is no file lock (Windows).
            ocgis_lh.shutdown()
            if made_output_directory:
                shutil.rmtree(outdir)
            raise
        finally:
            ocgis_lh.shutdown()
Example #19
    def execute(self):
        # check for a user-supplied output prefix
        prefix = self.ops.prefix

        # do directory management #

        # flag to indicate a directory is made. mostly a precaution to make sure the appropriate directory is
        # removed.
        made_output_directory = False

        if self.ops.output_format in self._no_directory:
            # No output directory for some formats.
            outdir = None
        else:
            # Directories or a single output file(s) is created for the other cases.
            if self.ops.add_auxiliary_files:
                # Auxiliary files require that a directory be created.
                outdir = os.path.join(self.ops.dir_output, prefix)
                # Create and/or remove the output directory.
                if vm.rank == 0:
                    if os.path.exists(outdir):
                        if env.OVERWRITE:
                            shutil.rmtree(outdir)
                        else:
                            raise IOError('The output directory exists but env.OVERWRITE is False: {0}'.format(outdir))
                    os.mkdir(outdir)
                # Block until output directory is created. Most often the zero rank manages writing, but this is not a
                # requirement.
                vm.Barrier()
                # On an exception, the output directory needs to be removed.
                made_output_directory = True
            else:
                # with no auxiliary files the output directory will do just fine
                outdir = self.ops.dir_output

        try:
            # configure logging ########################################################################################

            progress = self._get_progress_and_configure_logging_(outdir, prefix)

            # create local logger
            interpreter_log = ocgis_lh.get_logger('interpreter')

            ocgis_lh('Initializing...', interpreter_log)

            # set up environment #######################################################################################

            # run validation - doesn't do much now
            self.check()

            # do not perform vector wrapping for NetCDF output
            if self.ops.output_format == 'nc':
                ocgis_lh('"vector_wrap" set to False for netCDF output',
                         interpreter_log, level=logging.WARN)
                self.ops.vector_wrap = False

            # if the requested output format is "meta" then no operations are run and only the operations dictionary is
            # required to generate output.
            Converter = self.ops._get_object_(OutputFormat.name).get_converter_class()
            if issubclass(Converter, AbstractMetaConverter):
                ret = Converter(self.ops).write()
            # this is the standard request for other output types.
            else:
                # the operations object performs subsetting and calculations
                ocgis_lh('initializing subset', interpreter_log, level=logging.DEBUG)
                so = OperationsEngine(self.ops, progress=progress)
                # if there is no grouping on the output files, a single converter is needed
                if self.ops.output_grouping is None:
                    ocgis_lh('initializing converter', interpreter_log, level=logging.DEBUG)
                    conv = self._get_converter_(Converter, outdir, prefix, so)
                    ocgis_lh('starting converter write loop: {0}'.format(self.ops.output_format), interpreter_log,
                             level=logging.DEBUG)
                    ret = conv.write()
                else:
                    raise NotImplementedError

            ocgis_lh('Operations successful.', interpreter_log)

            return ret
        except:
            # The output directory needs to be removed if one was created. Shutdown logging before to make sure there
            # is no file lock (Windows).
            ocgis_lh.shutdown()
            if vm.rank == 0 and made_output_directory:
                shutil.rmtree(outdir)
            raise
        finally:
            ocgis_lh.shutdown()

            if env.ADD_OPS_MPI_BARRIER:
                vm.Barrier()
Example #20
    def execute(self):
        ## check for a user-supplied output prefix
        prefix = self.ops.prefix
            
        # do directory management #

        # flag to indicate a directory is made. mostly a precaution to make sure the appropriate directory is removed.
        made_output_directory = False

        if self.ops.output_format == 'numpy':
            # no output directory for numpy output
            outdir = None
        else:
            # directories or a single output file(s) is created for the other cases
            if self.ops.add_auxiliary_files:
                # auxiliary files require that a directory be created
                outdir = os.path.join(self.ops.dir_output,prefix)
                if os.path.exists(outdir):
                    if env.OVERWRITE:
                        shutil.rmtree(outdir)
                    else:
                        raise(IOError('The output directory exists but env.OVERWRITE is False: {0}'.format(outdir)))
                os.mkdir(outdir)
                # on an exception, the output directory needs to be removed
                made_output_directory = True
            else:
                # with no auxiliary files the output directory will do just fine
                outdir = self.ops.dir_output

        try:
            ## configure logging ###################################################
            
            ## if file logging is enabled, perform some logic based on the operational
            ## parameters.
            if env.ENABLE_FILE_LOGGING and self.ops.add_auxiliary_files:
                if self.ops.output_format == 'numpy':
                    to_file = None
                else:
                    to_file = os.path.join(outdir,prefix+'.log')
            else:
                to_file = None
            
            ## flags to determine streaming to console
            if env.VERBOSE:
                to_stream = True
            else:
                to_stream = False
    
            ## configure the logger
            if env.DEBUG:
                level = logging.DEBUG
            else:
                level = logging.INFO
            ## this wraps the callback function with methods to capture the
            ## completion of major operations.
            progress = ProgressOcgOperations(callback=self.ops.callback)
            ocgis_lh.configure(to_file=to_file,to_stream=to_stream,level=level,
                               callback=progress,callback_level=level)
            
            ## create local logger
            interpreter_log = ocgis_lh.get_logger('interpreter')
            
            ocgis_lh('Initializing...',interpreter_log)
            
            ## set up environment ##############################################
                
            self.check() ## run validation - doesn't do much now
                
            ## do not perform vector wrapping for NetCDF output
            if self.ops.output_format == 'nc':
                ocgis_lh('"vector_wrap" set to False for netCDF output',
                         interpreter_log,level=logging.WARN)
                self.ops.vector_wrap = False
    
            ## if the requested output format is "meta" then no operations are run
            ## and only the operations dictionary is required to generate output.
            if self.ops.output_format == 'meta':
                ret = MetaConverter(self.ops).write()
            ## this is the standard request for other output types.
            else:
                ## the operations object performs subsetting and calculations
                ocgis_lh('initializing subset',interpreter_log,level=logging.DEBUG)
                so = SubsetOperation(self.ops,progress=progress)
                ## if there is no grouping on the output files, a single converter
                ## is needed
                if self.ops.output_grouping is None:
                    Conv = AbstractConverter.get_converter(self.ops.output_format)
                    ocgis_lh('initializing converter',interpreter_log,
                             level=logging.DEBUG)
                    conv = Conv(so,outdir,prefix,ops=self.ops,add_auxiliary_files=self.ops.add_auxiliary_files,
                                overwrite=env.OVERWRITE)
                    ocgis_lh('starting converter write loop: {0}'.format(self.ops.output_format),interpreter_log,
                             level=logging.DEBUG)
                    ret = conv.write()
                else:
                    raise(NotImplementedError)
            
            ocgis_lh('Operations successful.',interpreter_log)

            return ret
        except:
            # on an exception, the output directory needs to be removed if one was created. once the output directory is
            # removed, reraise.
            if made_output_directory:
                shutil.rmtree(outdir)
            raise
        finally:
            ## shut down logging
            ocgis_lh.shutdown()