Example 1
    def hacFilter(self, scan, quant="DBZH", enough=100):
        NOD = odim_source.NODfromSource(scan)

        # If HAC files are missing, then this method will passively fail.
        try:
            self.readHac(hacFile(scan, lastmonth=True))

            if self.hac.getAttribute("how/count") < enough:
                raise ValueError("Not enough hits in climatology for %s" % NOD)

            hac_data = self.hac.getData()
            if hac_data.shape != (scan.nrays, scan.nbins):
                print(hac_data.shape, (scan.nrays, scan.nbins))
                raise IOError("Scan and HAC have different geometries for %s" % NOD)

            ## Get site-specific threshold!
            try:
                self.thresh = ARGS[NOD].thresh
            except KeyError:
                self.thresh = ARGS["default"].thresh
            ## Got site-specific threshold?

            qind = _ravefield.new()
            qind.setData(zeros(hac_data.shape, uint8))
            qind.addAttribute("how/task", "eu.opera.odc.hac")
            qind.addAttribute("how/task_args", self.thresh)
            scan.addQualityField(qind)

            _odc_hac.hacFilter(scan, self.hac, quant)
        except Exception:
            print(traceback.format_exc())
Example 2
    def create_filename(self, pobj):
        #_polarscan.isPolarScan(obj) and not _polarvolume.isPolarVolume(obj):
        if _polarvolume.isPolarVolume(pobj):
            ptype = "pvol"
        elif _polarscan.isPolarScan(pobj):
            ptype = "scan"
        else:
            try:
                ptype = pobj.getAttribute("what/object").lower()
            except:
                ptype = "unknowntype"
        src = odim_source.NODfromSource(pobj)
        dstr = "19700101"
        tstr = "000000"
        try:
            dstr = pobj.date
            tstr = pobj.time
        except:
            pass

        t = tempfile.mkstemp(prefix="%s_%s_%s_%s_" % (ptype, src, dstr, tstr),
                             suffix=".h5",
                             dir=self.dumppath)
        os.close(t[0])
        return t[1]
Example 3
    def fetch_objects(self):
        nodes = ""
        objects = {}
        tasks = []
        for fname in self.filenames:
            obj = None
            try:
                if self.ravebdb is not None:
                    obj = self.ravebdb.get_rave_object(fname)
                else:
                    obj = _raveio.open(fname).object
            except IOError:
                self.logger.exception("Failed to open %s" % fname)

            is_scan = _polarscan.isPolarScan(obj)
            if is_scan:
                is_pvol = False
            else:
                is_pvol = _polarvolume.isPolarVolume(obj)

            if not is_scan and not is_pvol:
                self.logger.info(
                    "Input file %s is neither polar scan or volume, ignoring."
                    % fname)
                continue

            if self.ignore_malfunc:
                obj = rave_util.remove_malfunc(obj)
                if obj is None:
                    continue

            node = odim_source.NODfromSource(obj)

            if len(nodes):
                nodes += ",'%s'" % node
            else:
                nodes += "'%s'" % node

            objects[fname] = obj

            if is_scan:
                self.logger.debug(
                    "Scan used in composite generation - UUID: %s, Node: %s, Nominal date and time: %sT%s",
                    fname, node, obj.date, obj.time)
                self.add_how_task_from_scan(obj, tasks)
            elif is_pvol:
                self.logger.debug(
                    "PVOL used in composite generation - UUID: %s, Node: %s, Nominal date and time: %sT%s",
                    fname, node, obj.date, obj.time)
                for i in range(obj.getNumberOfScans()):
                    scan = obj.getScan(i)
                    self.add_how_task_from_scan(scan, tasks)

        how_tasks = ",".join(tasks)

        return objects, nodes, how_tasks
Example 4
    def process(self, obj, reprocess_quality_flag=True, arguments=None):
        #_rave.setDebugLevel(_rave.Debug_RAVE_DEBUG)

        nod = odim_source.NODfromSource(obj)

        # Canadian S-band data use the default kernel size, NEXRAD larger
        if nod[:3] == 'cas':
            ec_drqc.drQC(obj)
        else:
            ec_drqc.drQC(obj, kernelx=3)

        return obj
Example 5
def hacFile(scan, lastmonth=False, nod=True):
    NOD = odim_source.NODfromSource(scan)
    if not nod:
        CCCC = odim_source.CCCC[NOD]
        RAD = odim_source.RAD[NOD][2:]
    elangle = str(int(round(scan.elangle * rd * 10) * 10)).zfill(5)
    rays = str(scan.nrays).zfill(4)
    bins = str(scan.nbins).zfill(4)

    YYYYMM = scan.date[:6]
    if lastmonth:
        YYYYMM = lastMonth(YYYYMM)

    if nod:
        return HACDATA + "/%s_%s_%s_%sx%s_hit-accum.hdf" % (
            YYYYMM, NOD, elangle, rays, bins)

    else:
        return HACDATA + "/%s_%s_%s_%s_%sx%s_hit-accum.hdf" % (
            YYYYMM, CCCC, RAD, elangle, rays, bins)
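For reference, the NOD-based branch above produces paths of the following form. This is a worked illustration derived from the formatting code, with assumed values (date, NOD, elevation and geometry are examples, not taken from the original source):

    # Illustrative only: with scan.date = "20240115", NOD = "seang",
    # an elevation of 0.5 degrees, nrays = 360 and nbins = 480,
    # hacFile(scan) returns
    #     HACDATA + "/202401_seang_00050_0360x0480_hit-accum.hdf"
    # and hacFile(scan, lastmonth=True) uses the previous month, "202312".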
Example 6
    def hacIncrement(self, scan, quant="DBZH"):
        NOD = odim_source.NODfromSource(scan)
        hacfile = hacFile(scan)

        try:
            try:
                self.readHac(hacfile)
            except IOError:
                self.makeHac(hacfile, scan.nrays, scan.nbins)

            hac_data = self.hac.getData()
            if hac_data.shape != (scan.nrays, scan.nbins):
                print(hac_data.shape, (scan.nrays, scan.nbins))
                raise IOError("Scan and HAC have different geometries for %s" % NOD)

            _odc_hac.hacIncrement(scan, self.hac, quant)

            self.writeHac(hacfile)
        except IOError:
            pass
Example 7
    def process(self,
                obj,
                reprocess_quality_flag=True,
                quality_control_mode=QUALITY_CONTROL_MODE_ANALYZE_AND_APPLY,
                arguments=None):
        src = odim_source.NODfromSource(obj)
        try:
            chain = self.chain_registry.get_chain(src)
        except LookupError:
            return obj, []

        algorithm = None
        qfields = []
        for link in chain.links():
            p = rave_pgf_quality_registry.get_plugin(link.refname())
            if p is not None:
                try:
                    if link.arguments() is not None:
                        newargs = {}
                        if arguments is not None:
                            newargs.update(arguments)
                        newargs.update(link.arguments())
                        obj, plugin_qfield = p.process(obj,
                                                       reprocess_quality_flag,
                                                       quality_control_mode,
                                                       newargs)
                    else:
                        obj, plugin_qfield = p.process(obj,
                                                       reprocess_quality_flag,
                                                       quality_control_mode,
                                                       arguments)
                    na = p.algorithm()
                    qfields += plugin_qfield
                    if algorithm is None and na is not None:  # Try to get the generator algorithm
                        algorithm = na
                except Exception:
                    logger.exception("Caught exception when processing object")

        return obj, qfields
Example 8
    def testNODfromSource(self):
        rio = _raveio.open(self.FIXTURE)
        n = odim_source.NODfromSource(rio.object)
        self.assertEqual(n, 'sella')
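The assertion above relies on NODfromSource resolving the NOD identifier from the object's ODIM /what/source string. A minimal sketch of the idea, with a hypothetical source value (the fixture's actual content is not shown in this example):

    # Hypothetical illustration: if the fixture carried a source string such as
    #     obj.source = "WMO:02092,RAD:SE41,NOD:sella"
    # then odim_source.NODfromSource(obj) would return "sella".
    # When no NOD key is present, odim_source typically falls back to its
    # source registry and resolves the NOD from the other identifiers.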
Example 9
    def fetch_objects(self):
        nodes = ""
        objects = {}
        tasks = []
        malfunc_files = 0

        preload_quantity = None
        if self.use_lazy_loading and self.use_lazy_loading_preloads:
            preload_quantity = self.quantity

        for fname in self.filenames:
            obj = None
            try:
                if self.ravebdb is not None:
                    obj = self.ravebdb.get_rave_object(fname,
                                                       self.use_lazy_loading,
                                                       preload_quantity)
                else:
                    obj = _raveio.open(fname, self.use_lazy_loading,
                                       preload_quantity).object  #self.quantity
            except IOError:
                self.logger.exception("Failed to open %s", fname)

            is_scan = _polarscan.isPolarScan(obj)
            if is_scan:
                is_pvol = False
            else:
                is_pvol = _polarvolume.isPolarVolume(obj)

            if not is_scan and not is_pvol:
                self.logger.warn(
                    "Input file %s is neither polar scan or volume, ignoring.",
                    fname)
                continue

            # Force azimuthal nav information usage if requested
            obj.use_azimuthal_nav_information = self.use_azimuthal_nav_information

            if self.ignore_malfunc:
                obj = rave_util.remove_malfunc(obj)
                if obj is None:
                    self.logger.info(
                        "Input file %s detected as 'malfunc', ignoring.",
                        fname)
                    malfunc_files += 1
                    continue

            node = odim_source.NODfromSource(obj)

            if len(nodes):
                nodes += ",'%s'" % node
            else:
                nodes += "'%s'" % node

            objects[fname] = obj

            if is_scan:
                self.logger.debug(
                    "Scan used in composite generation - UUID: %s, Node: %s, Nominal date and time: %sT%s",
                    fname, node, obj.date, obj.time)
                self.add_how_task_from_scan(obj, tasks)
            elif is_pvol:
                self.logger.debug(
                    "PVOL used in composite generation - UUID: %s, Node: %s, Nominal date and time: %sT%s",
                    fname, node, obj.date, obj.time)
                for i in range(obj.getNumberOfScans()):
                    scan = obj.getScan(i)
                    self.add_how_task_from_scan(scan, tasks)

        how_tasks = ",".join(tasks)

        all_files_malfunc = (len(self.filenames) > 0
                             and malfunc_files == len(self.filenames))

        return objects, nodes, how_tasks, all_files_malfunc
Example 10
    def _generate(self, dd, dt, area=None):
        self._debug_generate_info(area)

        if self.verbose:
            self.logger.info("Fetching objects and applying quality plugins")

        self.logger.debug(
            "Generating composite with date and time %sT%s for area %s", dd,
            dt, area)

        objects, nodes, how_tasks, all_files_malfunc = self.fetch_objects()

        if all_files_malfunc:
            self.logger.info(
                "Content of all provided files were marked as 'malfunc'. Since option 'ignore_malfunc' is set, no composite is generated!"
            )
            return None

        objects, algorithm, qfields = self.quality_control_objects(objects)

        self.logger.debug("Quality controls for composite generation: %s",
                          (",".join(qfields)))

        if len(objects) == 0:
            self.logger.info(
                "No objects provided to the composite generator. No composite will be generated!"
            )
            return None

        objects = list(objects.values())

        if self.dump:
            self._dump_objects(objects)

        generator = _pycomposite.new()
        if area is not None:
            if _area.isArea(area):
                pyarea = area
            else:
                pyarea = my_area_registry.getarea(area)
        else:
            if self.verbose:
                self.logger.info("Determining best fit for area")
            A = rave_area.MakeAreaFromPolarObjects(objects, self.pcsid,
                                                   self.xscale, self.yscale)

            pyarea = _area.new()
            pyarea.id = "auto-generated best-fit"
            pyarea.xsize = A.xsize
            pyarea.ysize = A.ysize
            pyarea.xscale = A.xscale
            pyarea.yscale = A.yscale
            pyarea.extent = A.extent
            pcs = rave_projection.pcs(A.pcs)
            pcsname = pcs.name
            if not is_py27:
                pcsname = pcsname.decode()
            pyarea.projection = _projection.new(pcs.id, pcsname,
                                                ' '.join(pcs.definition))

            if len(objects) == 1:
                try:
                    tmpid = odim_source.NODfromSource(objects[0])
                    pyarea.id = "auto_%s_%s" % (A.pcs, tmpid)
                except:
                    pass

        generator.addParameter(self.quantity, self.gain, self.offset,
                               self.minvalue)
        generator.product = self.product
        if algorithm is not None:
            generator.algorithm = algorithm

        for o in objects:
            generator.add(o)
            # We want to ensure that we get a proper indexing of included radar
            sourceid = o.source
            try:
                osource = odim_source.ODIM_Source(o.source)
                if osource.wmo:
                    sourceid = "WMO:%s" % osource.wmo
                elif osource.rad:
                    sourceid = "RAD:%s" % osource.rad
                elif osource.nod:
                    sourceid = "NOD:%s" % osource.nod
            except:
                pass

            if sourceid not in self.radar_index_mapping:
                self.radar_index_mapping[sourceid] = self.get_next_radar_index()

        generator.selection_method = self.selection_method
        generator.interpolation_method = self.interpolation_method
        generator.date = o.date if dd is None else dd
        generator.time = o.time if dt is None else dt
        generator.height = self.height
        generator.elangle = self.elangle
        generator.range = self.range

        if self.qitotal_field is not None:
            generator.quality_indicator_field_name = self.qitotal_field

        if self.prodpar is not None:
            self._update_generator_with_prodpar(generator)

        if self.verbose:
            self.logger.info("Generating cartesian composite")

        generator.applyRadarIndexMapping(self.radar_index_mapping)

        result = generator.generate(pyarea, qfields)

        if self.applyctfilter:
            if self.verbose:
                self.logger.debug("Applying ct filter")
            rave_ctfilter.ctFilter(result, self.quantity)

        if self.applygra:
            if not "se.smhi.composite.distance.radar" in qfields:
                self.logger.info(
                    "Trying to apply GRA analysis without specifying a quality plugin specifying the se.smhi.composite.distance.radar q-field, disabling..."
                )
            else:
                if self.verbose:
                    self.logger.info(
                        "Applying GRA analysis (ZR A = %f, ZR b = %f)" %
                        (self.zr_A, self.zr_b))
                grafield = self._apply_gra(result, dd, dt)
                if grafield:
                    result.addParameter(grafield)
                else:
                    self.logger.warn("Failed to generate gra field....")

        # Hack to create a BRDR field if the qfields contains se.smhi.composite.index.radar
        if "se.smhi.composite.index.radar" in qfields:
            bitmapgen = _bitmapgenerator.new()
            brdr_field = bitmapgen.create_intersect(
                result.getParameter(self.quantity),
                "se.smhi.composite.index.radar")
            brdr_param = result.createParameter("BRDR",
                                                _rave.RaveDataType_UCHAR)
            brdr_param.setData(brdr_field.getData())

        if self.applygapfilling:
            if self.verbose:
                self.logger.debug("Applying gap filling")
            t = _transform.new()
            gap_filled = t.fillGap(result)
            result.getParameter(self.quantity).setData(
                gap_filled.getParameter(self.quantity).getData())

        # Fix so that we get a valid place for /what/source and /how/nodes
        plc = result.source
        result.source = "%s,CMT:%s" % (CENTER_ID, plc)
        result.addAttribute('how/nodes', nodes)
        if self.use_site_source and len(objects) == 1:
            try:
                result.source = objects[0].source
                if result.source.find("NOD:") == -1:
                    tmpid = odim_source.NODfromSource(objects[0])
                    result.source = "%s,NOD:%s,CMT:%s" % (
                        self.remove_CMT_from_source(result.source), tmpid, plc)
                else:
                    result.source = "%s,CMT:%s" % (self.remove_CMT_from_source(
                        result.source), plc)
            except:
                self.logger.exception("Failed to get source from object")

        if how_tasks != "":
            result.addAttribute('how/task', how_tasks)

        if self.verbose:
            self.logger.debug("Returning resulting composite image")

        return result
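The per-radar source identifiers built in the loop above prefer the WMO key, then RAD, then NOD from the object's /what/source string, falling back to the raw source string. A minimal standalone sketch of that preference order, using an illustrative source value (the identifiers shown are examples, not taken from the original module):

    # Sketch of the identifier preference used when filling radar_index_mapping.
    source = "WMO:02606,RAD:SE50,PLC:Angelholm,NOD:seang"  # example value only
    parts = dict(item.split(":", 1) for item in source.split(","))
    sourceid = source  # fallback: keep the full source string
    for key in ("WMO", "RAD", "NOD"):
        if parts.get(key):
            sourceid = "%s:%s" % (key, parts[key])
            break
    print(sourceid)  # -> "WMO:02606"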
Example 11
def calculate_gra_coefficient(distancefield, interval, adjustmentfile, etime,
                              edate, acrrproduct, db):
    matcher = obsmatcher.obsmatcher(db)
    logger.info("rave_pgf_gra_plugin: Matching observations")
    points = matcher.match(acrrproduct,
                           acc_period=interval,
                           quantity="ACRR",
                           how_task=distancefield)
    if len(points) == 0:
        logger.warn("Could not find any matching observations")
    else:
        logger.info(
            "Matched %d points between acrr product and observation db" %
            len(points))
        db.merge(points)
    d = acrrproduct.date
    t = acrrproduct.time
    tlimit = datetime.datetime(int(d[:4]), int(d[4:6]), int(d[6:8]),
                               int(t[0:2]), int(t[2:4]), int(t[4:6]))
    tlimit = tlimit - datetime.timedelta(hours=interval * MERGETERMS)
    dlimit = datetime.datetime(int(d[:4]), int(d[4:6]), int(d[6:8]),
                               int(t[0:2]), int(t[2:4]), int(t[4:6]))
    dlimit = dlimit - datetime.timedelta(hours=12 * MERGETERMS)
    logger.info("rave_pgf_gra_plugin: Deleting old observations")
    db.delete_grapoints(
        dlimit
    )  # We don't want any points older than 12 hour * MERGETERMS back in time
    points = db.get_grapoints(
        tlimit
    )  # Get all gra points newer than interval*MERGETERMS hours back in time
    logger.info(
        "Using %d number of points for calculating the gra coefficients" %
        len(points))
    generate_backup_coeff = False
    if len(points) > 2:
        try:
            if adjustmentfile is not None:
                logger.info(
                    "rave_pgf_gra_plugin: Performing gra coefficient generation with adjustmentfile and %d points"
                    % len(points))
                significant, npoints, loss, r, sig, corr_coeff, a, b, c, m, dev = gra.generate(
                    points, edate, etime, adjustmentfile)
            else:
                logger.info(
                    "rave_pgf_gra_plugin: Performing gra coefficient generation with %d points"
                    % len(points))
                significant, npoints, loss, r, sig, corr_coeff, a, b, c, m, dev = gra.generate(
                    points, edate, etime)
            logger.info(
                "rave_pgf_gra_plugin: Finished performing gra coefficient generation"
            )
            if math.isnan(a) or math.isnan(b) or math.isnan(c):
                logger.error("A/B or C for %s %s is not a number: %f,%f,%f" %
                             (acrrproduct.date, acrrproduct.time, a, b, c))
                return
        except Exception:
            logger.exception("Failed during gra coefficients generation")
            return
    else:
        return

    logger.info("rave_pgf_gra_plugin: Getting NOD source for acrr product")

    # If we come here, store the coefficients in the database so that we can search for them when applying the coefficients
    NOD = odim_source.NODfromSource(acrrproduct)
    if not NOD:
        NOD = ""

    logger.info("rave_pgf_gra_plugin: Merging gra coefficients")
    grac = gra_coefficient(NOD, acrrproduct.date, acrrproduct.time,
                           significant, npoints, loss, r, sig, corr_coeff, a,
                           b, c, float(m), float(dev))
    db.merge(grac)
    logger.info("rave_pgf_gra_plugin: Coefficients merged")
Example 12
def getNod(src):
    return odim_source.NODfromSource(src)
Example 13
    def _generate(self, dd, dt, area=None):
        self._debug_generate_info(area)

        if self.verbose:
            self.logger.info("Fetching objects and applying quality plugins")

        self.logger.debug(
            "Generating composite with date and time %sT%s for area %s", dd,
            dt, area)

        objects, nodes, how_tasks = self.fetch_objects()

        objects, algorithm, qfields = self.quality_control_objects(objects)

        objects = list(objects.values())

        if self.dump:
            self._dump_objects(objects)

        generator = _pycomposite.new()
        if area is not None:
            if _area.isArea(area):
                pyarea = area
            else:
                pyarea = my_area_registry.getarea(area)
        else:
            if self.verbose:
                self.logger.info("Determining best fit for area")
            A = rave_area.MakeAreaFromPolarObjects(objects, self.pcsid,
                                                   self.xscale, self.yscale)

            pyarea = _area.new()
            pyarea.id = "auto-generated best-fit"
            pyarea.xsize = A.xsize
            pyarea.ysize = A.ysize
            pyarea.xscale = A.xscale
            pyarea.yscale = A.yscale
            pyarea.extent = A.extent
            pcs = rave_projection.pcs(A.pcs)
            pyarea.projection = _projection.new(pcs.id, pcs.name,
                                                ' '.join(pcs.definition))

            if len(objects) == 1:
                try:
                    tmpid = odim_source.NODfromSource(objects[0])
                    pyarea.id = "auto_%s_%s" % (A.pcs, tmpid)
                except:
                    pass

        if isinstance(self.quantity, str):
            generator.addParameter(self.quantity, self.gain, self.offset)
        else:
            for quantity in self.quantity:
                generator.addParameter(quantity, self.gain, self.offset)
        generator.product = self.product
        if algorithm is not None:
            generator.algorithm = algorithm

        if len(objects) == 0:
            self.logger.info("No objects provided to the composite generator.")
            if dd is None or dt is None:
                self.logger.error(
                    "Can not create a composite without specifying a valid date / time when no objects are provided."
                )
                raise Exception, "Can not create a composite without specifying a valid date / time when no objects are provided."

        for o in objects:
            generator.add(o)

        generator.selection_method = self.selection_method
        generator.date = o.date if dd is None else dd
        generator.time = o.time if dt is None else dt
        generator.height = self.height
        generator.elangle = self.elangle
        generator.range = self.range

        if self.qitotal_field is not None:
            generator.quality_indicator_field_name = self.qitotal_field

        if self.prodpar is not None:
            self._update_generator_with_prodpar(generator)

        if self.verbose:
            self.logger.info("Generating cartesian composite")
        result = generator.nearest(pyarea, qfields)

        if self.applyctfilter:
            if self.verbose:
                self.logger.debug("Applying ct filter")
            ret = rave_ctfilter.ctFilter(result, self.quantity)

        if self.applygra:
            if not "se.smhi.composite.distance.radar" in qfields:
                self.logger.info(
                    "Trying to apply GRA analysis without specifying a quality plugin specifying the se.smhi.composite.distance.radar q-field, disabling..."
                )
            else:
                if self.verbose:
                    self.logger.info(
                        "Applying GRA analysis (ZR A = %f, ZR b = %f)" %
                        (self.zr_A, self.zr_b))
                grafield = self._apply_gra(result, dd, dt)
                if grafield:
                    result.addParameter(grafield)
                else:
                    self.logger.warn("Failed to generate gra field....")

        if self.applygapfilling:
            if self.verbose:
                self.logger.debug("Applying gap filling")
            t = _transform.new()
            gap_filled = t.fillGap(result)
            result.getParameter(self.quantity).setData(
                gap_filled.getParameter(self.quantity).getData())

        # Fix so that we get a valid place for /what/source and /how/nodes
        plc = result.source
        result.source = "%s,CMT:%s" % (CENTER_ID, plc)
        result.addAttribute('how/nodes', nodes)
        if self.use_site_source and len(objects) == 1:
            try:
                result.source = objects[0].source
                if result.source.find("NOD:") == -1:
                    tmpid = odim_source.NODfromSource(objects[0])
                    result.source = "%s,NOD:%s,CMT:%s" % (
                        self.remove_CMT_from_source(result.source), tmpid, plc)
                else:
                    result.source = "%s,CMT:%s" % (self.remove_CMT_from_source(
                        result.source), plc)
            except:
                self.logger.exception("Failed to get source from object")

        if how_tasks != "":
            result.addAttribute('how/task', how_tasks)

        if self.verbose:
            self.logger.debug("Returning resulting composite image")

        return result
Example 14
def generate(fstr, overwrite=False, trim=True):
    try:
        # Make output filename by adding qc_ prefix if we don't overwrite the original file
        if overwrite:
            ofstr = fstr
        else:
            path, filename = os.path.split(fstr)
            s = filename.split('_')
            s.insert(1, 'qc')
            filename = '_'.join(s)
            ofstr = os.path.join(path, filename)
        #if os.path.isfile(ofstr):
        #    return ofstr, "Already OK"

        #print fstr
        rio = _raveio.open(fstr)
        pvol = rio.object

        # Check that we have TH and DBZH for each scan
        # Don't trust DOPVOL-filtered DBZH. Overwrite it.
        nod = odim_source.NODfromSource(pvol)
        if nod[:2] == 'ca':
            if nod != 'cawmn':
                checkDBZH(pvol, False)  # True to overwrite, False to keep
            else:
                checkDBZH(pvol, False)
        else:
            checkDBZH(pvol, False)
        if _polarvolume.isPolarVolume(pvol):  # Don't check scans
            sanityChecks(pvol)  # Sanity checks

        # Process
        if nod[:2] == 'ca':
            if nod == 'cawmn':
                # McGill S-band - already QC:ed but how?
                algorithm_ids = [
                    "beamb", "radvol-att", "radvol-broad", "qi-total"
                ]
            else:
                # RUAD C-band
                pvol.beamwidth = pvol.getAttribute(
                    'how/beamwH') * dr  # Why is this necessary?
                algorithm_ids = [
                    "hac-filter", "ropo", "beamb", "radvol-att",
                    "radvol-broad", "qi-total"
                ]

        else:
            # NEXRAD S-band
            if not pvol.hasAttribute('how/wavelength'):
                pvol.addAttribute(
                    'how/wavelength',
                    10.0)  # Attribute might not always be available
            algorithm_ids = [
                "drqc", "beamb", "radvol-att", "radvol-broad", "qi-total"
            ]
            if trim:
                rio.object = pvol
                rio = ecWxR_trimRange.generate(
                    ofstr,
                    RIO=rio)  # ofstr is dummy because it doesn't exist yet
                pvol = rio.object

        odc_polarQC.algorithm_ids = algorithm_ids
        pvol.addAttribute('how/algorithm_ids', ",".join(algorithm_ids))

        pvol = odc_polarQC.QC(pvol)

        rio.object = pvol
        rio.save(ofstr)

    except Exception:
        return fstr, traceback.format_exc()