Example #1
def moke_consumnes():
    calsimfile = getAttr("CALSIMFILE")
    f = opendss(calsimfile)  # open CALSIM file
    outfile = getAttr("BOUNDARYFILE")
    if not outfile or outfile == "":
        raise "Config variable BOUNDARYFILE not set and needed for prepro output"
    tw = prepro_window()

    moke_us_path = calsim_path("I504")
    moke_ref = findpath(f, moke_us_path)
    if (moke_ref and len(moke_ref) > 0):
        print "Upstream mokelumne flow found, not calculated"

    consumnes_path = calsim_path("C501")
    moke_ds_path = calsim_path("C504")
    consumnes_ref = findpath(f, consumnes_path)
    if not consumnes_ref:
        raise "Consumnes path %s not found" % consumnes_path
    moke_ds_ref = findpath(f, moke_ds_path)
    if not moke_ds_ref:
        raise "Mokulemne downstream path %s not found" % moke_ds_path
    consumnes = DataReference.create(consumnes_ref[0], tw).getData()
    moke_ds = DataReference.create(moke_ds_ref[0], tw).getData()
    mf = calsim_study_fpart(modify=1)
    moke_us_path = calsim_path("I504", mf)
    moke_us = moke_ds - consumnes

    writedss(outfile, moke_us_path, moke_us)
    return
Example #2
def txfr_flow_day(nodes_to_txfr_day):
    """ Unsmoothed transfer from CALSIM file to model input file.
    """
    calsimfile = getAttr("CALSIMFILE")
    f = opendss(calsimfile)  # open CALSIM file
    outfile = getAttr("BOUNDARYFILE")
    if not outfile or outfile == "":
        raise "Config variable BOUNDARYFILE not set and needed for prepro output"
    tw = prepro_window()

    for calsimname in nodes_to_txfr_day:  # Extend the list as needed
        mf = calsim_study_fpart(modify=1)
        dsspath = calsim_path(calsimname)
        dsspath1 = calsim_path(calsimname, modified_fpart=mf)
        processedpath = dsspath1.replace("1MON", "1DAY")
        print dsspath
        print processedpath
        paths = findpath(f, dsspath)
        if not paths or len(paths) > 1:
            print "File: %s" % calsimfile
            raise "Path %s not found or not unique" % dsspath
        ref = DataReference.create(paths[0], tw)
        monthly = ref.getData()
        daily = interpolate(monthly, "1DAY")
        if daily:
            writedss(outfile, processedpath, daily)
        else:
            raise "Failure to find CALSIM input data for: " + calsimname
def transfer_flow(nodes_to_transfer):
    """ Unsmoothed transfer from CALSIM file to model input file.
    """
    calsimfile = getAttr("CALSIMFILE")
    f = opendss(calsimfile)  # open CALSIM file
    outfile = getAttr("BOUNDARYFILE")
    if not outfile or outfile == "":
        raise "Config variable BOUNDARYFILE not set and needed for prepro output"
    tw = prepro_window()

    for calsimname in nodes_to_transfer:  # Extend the list as needed, but please keep in mind the
        # limitations of the conservative spline, at least at present.
        # Mainly, input flows should be substantially greater than
        # zero at all times (yolo would be inappropriate, for instance)
        dsspath = calsim_path(calsimname)
        paths = findpath(f, dsspath)
        if not paths or len(paths) > 1:
            print "File: %s" % calsimfile
            raise "Path %s not found or not unique" % dsspath
        ref = DataReference.create(paths[0], tw)
        monthly = ref.getData()
        mf = calsim_study_fpart(modify=1)
        dsspath = calsim_path(calsimname, modified_fpart=mf)
        if monthly:
            writedss(outfile, dsspath, monthly)

        else:
            raise "Failure to find CALSIM input data for: " + calsimname
def extend_flow(nodes_to_extend):
    """ Copying WY1922 data to WY1921 for allowing to preprocessing and running DSM2
        from 01Jan1921.
    """
    calsimfile=getAttr("CALSIMFILE") 
    f=opendss(calsimfile)           # open CALSIM file
    outfile=getAttr("BOUNDARYFILE")
    if not outfile or outfile == "":
        raise "Config variable BOUNDARYFILE not set and needed for prepro output"    
    tw=timewindow("01OCT1921 0000 - 01OCT1922 0000")

    for calsimname in nodes_to_extend:    
        print calsimname
        dsspath = calsim_path(calsimname)
        paths = findpath(f,dsspath)
        if not paths or len(paths)>1:
            print "File: %s" % calsimfile
            raise "Path %s not found or not unique" % dsspath
        ref=DataReference.create(paths[0],tw)
        monthly=ref.getData()

        itr = monthly.getIterator()
        d=zeros(len(monthly),'d')
        count=0
        while not itr.atEnd():
           el = itr.getElement()
           d[count] = el.getY()
           count = count + 1
           itr.advance()
        stime = "01OCT1920 0000" 
        rts = RegularTimeSeries(monthly.getName(),stime, \
              timeinterval("1MON").toString(), d, None, monthly.getAttributes())
        writedss(calsimfile,ref.getPathname().toString(),rts)
Example #5
def prep_vamp_ndo(calsimfile, outdss, fpart):

    STEP = string.lower(config.getAttr('CALSIMSTEP'))

    # CALSIM=opendss(calsimfile)
    SJR_PROCESS = config.getAttr("SJR_PROCESS")

    startyr = int(config.getAttr('START_DATE')[5:])
    endyr = int(config.getAttr('END_DATE')[5:])

    if (startyr < 1974 and endyr > 1991):
        twstr = "01NOV1921 0000 - 01OCT2003 0000"
    else:
        twstr = "01OCT1974 0000 - 01OCT1991 0000"

    path = "/CALSIM/NDO/FLOW-NDO//" + STEP + "/" + fpart + "/"
    ndo = dss_retrieve_ts(calsimfile, path, twstr)
    print ndo
    ndo15 = conserve.conserveSpline(ndo, "15MIN")
    if (SJR_PROCESS.upper() == "SINGLE_STEP") or (SJR_PROCESS.upper()
                                                  == "MULTI_STEP"):
        fpart_modified = calsim_study_fpart(modify=1)
        delta_ndo = calc_vamp_delta_ndo(calsimfile, outdss, fpart,
                                        fpart_modified, SJR_PROCESS)
        ndo15_vamp = ndo15 + interpolate(delta_ndo, "15MIN")
    else:
        ndo15_vamp = ndo15  # no VAMP correction; write the interpolated NDO unchanged

    writedss(calsimfile, "/CALSIM/NDO/FLOW-NDO//15MIN/" + fpart + "/",
             ndo15_vamp)
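A hedged usage sketch for prep_vamp_ndo; the file names and F part are placeholders, and the call assumes the config module and CALSIM path conventions used above.

prep_vamp_ndo("calsim.dss",            # hypothetical CALSIM DSS file
              "vamp_corrected.dss",    # hypothetical VAMP-corrected DSS file
              "STUDY-FPART")           # placeholder study F part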
def moke_consumnes():
    calsimfile = getAttr("CALSIMFILE")
    f = opendss(calsimfile)  # open CALSIM file
    outfile = getAttr("BOUNDARYFILE")
    if not outfile or outfile == "":
        raise "Config variable BOUNDARYFILE not set and needed for prepro output"
    tw = prepro_window()

    moke_us_path = calsim_path("I504")
    moke_ref = findpath(f, moke_us_path)
    if moke_ref and len(moke_ref) > 0:
        print "Upstream mokelumne flow found, not calculated"

    consumnes_path = calsim_path("C501")
    moke_ds_path = calsim_path("C504")
    consumnes_ref = findpath(f, consumnes_path)
    if not consumnes_ref:
        raise "Consumnes path %s not found" % consumnes_path
    moke_ds_ref = findpath(f, moke_ds_path)
    if not moke_ds_ref:
        raise "Mokulemne downstream path %s not found" % moke_ds_path
    consumnes = DataReference.create(consumnes_ref[0], tw).getData()
    moke_ds = DataReference.create(moke_ds_ref[0], tw).getData()
    mf = calsim_study_fpart(modify=1)
    moke_us_path = calsim_path("I504", mf)
    moke_us = moke_ds - consumnes

    writedss(outfile, moke_us_path, moke_us)
    return
Example #7
def transfer_ec():
    """ Unsmoothed transfer from CALSIM file to model input file.
    """
    
    f=opendss(getAttr("CALSIMFILE"))           # open CALSIM file
    outfile=getAttr("BOUNDARYFILE")
    process=getAttr("SJR_PROCESS")
    if not outfile or outfile == "":
        raise "Config variable BOUNDARYFILE not set and needed for prepro output"    
    tw=prepro_window()
    calsimstudy=calsim_study_fpart(modify=0)
    calsimstudyout=calsim_study_fpart(modify=1)
    if not calsimstudy or calsimstudy=="":
        print "CALSIMSTUDY envvar not set"
    dsspath="/CALSIM.*/VERNWQFINAL/SALINITY-EC//1MON/%s/" % calsimstudy
    processedpath=dsspath.replace(".*","-"+process).replace(
        "1MON","1DAY").replace(calsimstudy,calsimstudyout)
    print processedpath
    refs=findpath(f,dsspath)
    if not refs or len(refs)> 1:
        raise "Vernalis EC path %s not found or not unique" % dsspath
    ref=DataReference.create(refs[0],tw)
    monthly=ref.getData()
    daily=interpolate(monthly,"1DAY")
    
    if daily:
        writedss(outfile,processedpath, daily)
    else:
        raise "Failure to find CALSIM input data for: " + calsimname 
    return
def txfr_flow_day(nodes_to_txfr_day):
    """ Unsmoothed transfer from CALSIM file to model input file.
    """
    calsimfile = getAttr("CALSIMFILE")
    f = opendss(calsimfile)  # open CALSIM file
    outfile = getAttr("BOUNDARYFILE")
    if not outfile or outfile == "":
        raise "Config variable BOUNDARYFILE not set and needed for prepro output"
    tw = prepro_window()

    for calsimname in nodes_to_txfr_day:  # Extend the list as needed
        mf = calsim_study_fpart(modify=1)
        dsspath = calsim_path(calsimname)
        dsspath1 = calsim_path(calsimname, modified_fpart=mf)
        processedpath = dsspath1.replace("1MON", "1DAY")
        print dsspath
        print processedpath
        paths = findpath(f, dsspath)
        if not paths or len(paths) > 1:
            print "File: %s" % calsimfile
            raise "Path %s not found or not unique" % dsspath
        ref = DataReference.create(paths[0], tw)
        monthly = ref.getData()
        daily = interpolate(monthly, "1DAY")
        if daily:
            writedss(outfile, processedpath, daily)
        else:
            raise "Failure to find CALSIM input data for: " + calsimname
Example #9
def extend_flow(nodes_to_extend):
    """ Copying WY1922 data to WY1921 for allowing to preprocessing and running DSM2
        from 01Jan1921.
    """
    calsimfile = getAttr("CALSIMFILE")
    f = opendss(calsimfile)  # open CALSIM file
    outfile = getAttr("BOUNDARYFILE")
    if not outfile or outfile == "":
        raise "Config variable BOUNDARYFILE not set and needed for prepro output"
    tw = timewindow("01OCT1921 0000 - 01OCT1922 0000")

    for calsimname in nodes_to_extend:
        print calsimname
        dsspath = calsim_path(calsimname)
        paths = findpath(f, dsspath)
        if not paths or len(paths) > 1:
            print "File: %s" % calsimfile
            raise "Path %s not found or not unique" % dsspath
        ref = DataReference.create(paths[0], tw)
        monthly = ref.getData()

        itr = monthly.getIterator()
        d = zeros(len(monthly), 'd')
        count = 0
        while not itr.atEnd():
            el = itr.getElement()
            d[count] = el.getY()
            count = count + 1
            itr.advance()
        stime = "01OCT1920 0000"
        rts = RegularTimeSeries(monthly.getName(),stime, \
              timeinterval("1MON").toString(), d, None, monthly.getAttributes())
        writedss(calsimfile, ref.getPathname().toString(), rts)
Example #10
def dccOp(infile,outfile,inpath,outpath,allThirty=1,value=1.0,
          tw="01OCT1974 0000 - 01OCT1991 0000"):
  """
  Converts Delta Cross Channel gate operation
  from a CALSIM file containing fraction of days open
  Args:
    infile        CALSIM dss file specifying # days operating
    outfile       output dss file readable by DSM2
    inpath        input path, e.g. /CALSIM/DXC/GATE-DAYS-OPEN//1MON//
    outpath       output path, e.g. /CALSIM/DXC/GATE//IR-YEAR/fpart/ 
    value         time series value when gate is opened (must be 1.0 or 2.0),
                  where 1.0 is used for gate ops and 2.0 is number gates operating.
    tw            time window of output
    allThirty     true if CALSIM input is hardwired to thirty day months
  """  
  from vutils import timewindow
  from vdss import opendss,findpath,writedss
  from vista.time import TimeWindow
  from vista.set import DataReference
  import types
  g=opendss(infile)
  if not (type(outfile) == types.StringType):
    raise TypeError("Argument outfile should be name of a dss file")
  if not isinstance(tw,TimeWindow):
    tw = timewindow(tw)
  if not (value==1.0 or value==2.0):
    raise "Output time series 'on' value should be 1.0 (gate op) or 2.0 (gates operating)"
  x=findpath(g,inpath)[0]
  dxcref = DataReference.create(x,tw)
  dxc=dxcref.getData()
  if not dxc:
    raise "Cross channel data not found"
  dxcITS=daysPerMonthToITS(dxc,value,allThirty)
  writedss(outfile,outpath,dxcITS)
  return dxcITS
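A hedged usage sketch for dccOp, following the example paths from its docstring; the file names and the F part are placeholders, and the call assumes the same vutils/vdss environment.

gate_its = dccOp("calsim.dss",                                 # hypothetical CALSIM DSS file
                 "gates.dss",                                  # hypothetical DSM2 gate file
                 "/CALSIM/DXC/GATE-DAYS-OPEN//1MON//",
                 "/CALSIM-PROCESSED/DCC/OP//IR-YEAR/STUDY/",   # placeholder F part
                 allThirty=1,
                 value=1.0,
                 tw="01OCT1974 0000 - 01OCT1991 0000")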
Example #11
def transfer_flow(nodes_to_transfer):
    """ Unsmoothed transfer from CALSIM file to model input file.
    """
    calsimfile = getAttr("CALSIMFILE")
    f = opendss(calsimfile)  # open CALSIM file
    outfile = getAttr("BOUNDARYFILE")
    if not outfile or outfile == "":
        raise "Config variable BOUNDARYFILE not set and needed for prepro output"
    tw = prepro_window()

    for calsimname in nodes_to_transfer:  # Extend the list as needed, but please keep in mind the
        # limitations of the conservative spline, at least at present.
        # Mainly, input flows should be substantially greater than
        # zero at all times (yolo would be inappropriate, for instance)
        dsspath = calsim_path(calsimname)
        paths = findpath(f, dsspath)
        if not paths or len(paths) > 1:
            print "File: %s" % calsimfile
            raise "Path %s not found or not unique" % dsspath
        ref = DataReference.create(paths[0], tw)
        monthly = ref.getData()
        mf = calsim_study_fpart(modify=1)
        dsspath = calsim_path(calsimname, modified_fpart=mf)
        if monthly:
            writedss(outfile, dsspath, monthly)

        else:
            raise "Failure to find CALSIM input data for: " + calsimname
Example #12
def doall(dicufile, bndryfile):
    sum = net_dicu(dicufile, [334, 333, 332, 331, 330])
    pathname = sum.getPathname().toString()
    pathname = set_part(pathname, 'NET_DICU_NORTH_OF_FREEPORT',
                        Pathname.B_PART)
    pathname = set_part(pathname, 'FLOW', Pathname.C_PART)
    writedss(dicufile, pathname, sum.getData())
Example #13
def doall(dicufile, bndryfile):
    sum = net_dicu(dicufile, [335, 336, 337, 338, 339, 340, 341, 342])
    pathname = sum.getPathname().toString()
    pathname = set_part(pathname, 'NET_DICU_RSAC155_TO_RSAC128',
                        Pathname.B_PART)
    pathname = set_part(pathname, 'FLOW', Pathname.C_PART)
    writedss(dicufile, pathname, sum.getData())
Example #14
def prep_ndo(calsimf, dss_step, dss_fpart, twstr):
    ndofile = opendss(calsimf)
    TWIND = timewindow(twstr)
    ndo = DataReference.create(
        findpath(ndofile, "/CALSIM/NDO/FLOW-NDO//" + dss_step + "/" +
                 dss_fpart + "/")[0], TWIND).getData()
    ndo15 = conserve.conserveSpline(ndo, "15MIN")
    ndo15.getAttributes().setYUnits("CFS")
    ndo15.getAttributes().setYType("PER-AVER")
    writedss(calsimf, "/CALSIM/NDO/FLOW-NDO//15MIN/" + dss_fpart + "/", ndo15)
    return 0
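A hedged usage sketch for prep_ndo; the file name, time step, F part, and time window string are placeholders chosen to match the path conventions above.

prep_ndo("calsim.dss",                          # hypothetical CALSIM DSS file
         "1MON",                                # CALSIM time step (E part)
         "STUDY-FPART",                         # placeholder study F part
         "01OCT1974 0000 - 01OCT1991 0000")     # time window string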
def compare_dss_files(file1, file2, showPlot=False, outputFile=None, outputPathFile=None):
    """
    Compares the two files, reports the total absolute difference for each path that differs, and lists pathnames missing from either file.
    """
    g1 = vutils.opendss(file1)
    g2 = vutils.opendss(file2)
    print 'Comparing %s to %s' % (file1, file2)
    print '%12s\t%32s' % ('DIFFERENCE', 'PATHNAME')
    if outputPathFile:
        opf_handle=open(outputPathFile, 'wb')
        opf = csv.writer(opf_handle, delimiter=",", quotechar='"', quoting=csv.QUOTE_MINIMAL)
        opf.writerow([file1,file2])
    no_diff=True
    for ref1 in g1:
        p1 = ref1.pathname
        found = False
        for ref2 in g2:
            p2 = ref2.pathname
            if matches(p1, p2):
                found = True
                diff = ref2.data - ref1.data
                absdiff = diff.createSlice(diff.getTimeWindow())
                vtimeseries.apply(absdiff, math.fabs)
                diff_total = vtimeseries.total(absdiff)
                if (diff_total > 1e-06) :
                    no_diff=False
                    if showPlot: plot(ref1.data, ref2.data)
                    print '%10.2f\t%32s' % (diff_total, p1)
                    if outputFile:
                        diffp = set_part(p1, 'DIFF-%s-%s' % (os.path.basename(file1), os.path.basename(file2)), Pathname.A_PART)
                        writedss(outputFile, str(diffp), diff)
                    if outputPathFile:
                        opf.writerow([p1, p1, diff_total])
                break
        if (not found):
            no_diff=False
            print 'No matching path: %s in file %s NOT found in file %s' % (p1, file1, file2)
            if outputPathFile: opf.writerow([p1, "", "M"])
    for ref2 in g2:
        p2 = ref2.pathname
        found = False
        for ref1 in g1:
            p1 = ref1.pathname
            if matches(p1, p2):
                found = True
                break
        if (not found):
            no_diff=False
            print 'No matching path: %s in file %s NOT found in file %s' % (p2, file2, file1)
            if outputPathFile: opf.writerow(["", p2, "M"])
    if no_diff:
        print 'NO DIFFERENCE ACROSS ENTIRE FILEs %s and %s'%(file1,file2)
    if outputPathFile: opf_handle.close()
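A hedged usage sketch for compare_dss_files; the file names are placeholders. outputFile optionally receives per-path difference series, and outputPathFile receives a CSV summary of totals and missing paths.

compare_dss_files("run1.dss", "run2.dss",        # hypothetical files to compare
                  showPlot=False,
                  outputFile="diff.dss",         # optional: per-path difference series
                  outputPathFile="diff.csv")     # optional: summary CSV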
def prep_dicu(infile,outfile,tw):
    from vdss import opendss,writedss,findpath
    if (not infile): raise TypeError("infile was None")
    if (not outfile): raise TypeError("outfile was None")    
    if not (infile.endswith("dss") and outfile.endswith("dss")):
        raise "infile and outfile arguments must be dss files"
    g=opendss(infile)
    all=findpath(g,"///DRAIN-EC////")
    for i in range(len(all)):
        ts=all[i].getData()
        s=expand_seasonal(ts,tw)
        writedss(outfile,all[i].getPathname().toString(),s)
Example #17
def prep_dicu(infile, outfile, tw):
    from vdss import opendss, writedss, findpath
    if (not infile): raise TypeError("infile was None")
    if (not outfile): raise TypeError("outfile was None")
    if not (infile.endswith("dss") and outfile.endswith("dss")):
        raise "infile and outfile arguments must be dss files"
    g = opendss(infile)
    all = findpath(g, "///DRAIN-EC////")
    for i in range(len(all)):
        ts = all[i].getData()
        s = expand_seasonal(ts, tw)
        writedss(outfile, all[i].getPathname().toString(), s)
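A hedged usage sketch for prep_dicu, assuming timewindow is available from vutils as in the other examples; the file names are placeholders.

from vutils import timewindow
tw = timewindow("01OCT1974 0000 - 01OCT1991 0000")
prep_dicu("dicu_ec.dss", "dicu_ec_expanded.dss", tw)   # hypothetical input/output DSS files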
Example #18
def download_data_in_yearly_chunks(station_name, sensor_number, start_year, end_year, file):
	"""
	Downloads data in year-long chunks, since CDEC cannot handle very large data requests.
	It writes the data to the file of your choice instead of returning a time series.
	"""
	for year in range(start_year, end_year):
		start_date = "01/01/%s"%str(year)
		end_date="12/31/%s"%(str(year))
		rts=retrieve(station_name,sensor_number,start_date,end_date,verbose=1)
		#if rts: vdisplay.plot(rts)
		writedss(file,rts.name,rts)
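A hedged usage sketch for download_data_in_yearly_chunks; the CDEC station name and sensor number are illustrative only, and the call assumes retrieve and writedss are available as above.

# Hypothetical station/sensor; each year's data is appended to the same DSS file.
download_data_in_yearly_chunks("MRZ", 100, 1995, 2000, "cdec_data.dss")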
#
def copy_dicu_flow():
    """ Unsmoothed transfer from DICU file to model input file.
    """
    dicufile=getAttr("DICUFLOWFILE") 
    f=opendss(dicufile)           # open DICU flow file
    outfile=getAttr("DICUFILE")
    if not outfile or outfile == "":
        raise "Config variable DICUFILE not set and needed for prepro output"
    tw=prepro_window()

    for item in f : 
        ref=DataReference.create(item,tw)
        data=ref.getData()
        writedss(outfile,ref.getPathname().toString(), data)
Example #20
def copy_dicu_flow():
    """ Unsmoothed transfer from DICU file to model input file.
    """
    dicufile=getAttr("DICUFLOWFILE") 
    f=opendss(dicufile)           # open DICU flow file
    outfile=getAttr("DICUFILE")
    if not outfile or outfile == "":
        raise "Config variable DICUFILE not set and needed for prepro output"
    tw=prepro_window()

    for item in f : 
        ref=DataReference.create(item,tw)
        data=ref.getData()
        writedss(outfile,ref.getPathname().toString(), data)
Example #21
def copy_stage():
    """ Unsmoothed transfer from DICU file to model input file.
    """
    sourcefile=getAttr("STAGE_SOURCE_FILE") 
    f=opendss(sourcefile)
    outfile=getAttr("STAGEFILE")
    stageversion=getAttr("STAGE_VERSION")
    dsspath="/FILL\+CHAN/RSAC054/STAGE//15MIN/%s/" % stageversion
    if not outfile or outfile == "":
        raise "Config variable BOUNDARYFILE not set and needed for prepro output"    
    tw=prepro_window()
    ref = findpath(f,dsspath)
    if len(ref) != 1:
        raise "Stage path not found or not unique. Found refs: %s" % ref
    ref=DataReference.create(ref[0],tw)
    data=ref.getData()
    writedss(outfile,ref.getPathname().toString(), data)
Example #22
def copy_stage():
    """ Unsmoothed transfer from DICU file to model input file.
    """
    sourcefile = getAttr("STAGE_SOURCE_FILE")
    f = opendss(sourcefile)
    outfile = getAttr("STAGEFILE")
    stageversion = getAttr("STAGE_VERSION")
    dsspath = "/FILL\+CHAN/RSAC054/STAGE//15MIN/%s/" % stageversion
    if not outfile or outfile == "":
        raise "Config variable BOUNDARYFILE not set and needed for prepro output"
    tw = prepro_window()
    ref = findpath(f, dsspath)
    if len(ref) != 1:
        raise "Stage path not found or not unique. Found refs: %s" % ref
    ref = DataReference.create(ref[0], tw)
    data = ref.getData()
    writedss(outfile, ref.getPathname().toString(), data)
def smooth_flow(nodes_to_smooth):
    """ A slightly smoothed version of monthly flows to avoid sharp transitions
        between months. Uses a tension spline.
    """
    calsimfile=getAttr("CALSIMFILE")
    f=opendss(calsimfile)           # open CALSIM file
    outfile=getAttr("BOUNDARYFILE")
    if not outfile or outfile == "":
        raise "Config variable BOUNDARYFILE not set and needed for prepro output"
    fpart_mod=calsim_study_fpart(modify=1)
    
    tw=prepro_window()


    for calsimname in nodes_to_smooth:      # Extend the list as needed, but please keep in mind the
                                            # limitations of the conservative spline, at least at present.
                                            # Mainly, input flows should be substantially greater than
                                            # zero at all times (yolo would be inappropriate, for instance)
        dsspath = calsim_path(calsimname)
        paths=findpath(f,dsspath)
        if not paths or len(paths)>1:
            print "File: %s" % calsimfile
            raise "Path %s not found or not unique" % dsspath
        
        ref=DataReference.create(paths[0],tw)
        monthly=ref.getData()
        if monthly:		
            if len(monthly) < 4:
			    raise "Length of monthly data too short for smoothing. Wrong time window?"
            try:
                daily=conserve.conserveSpline(monthly,"1DAY")
            except:
                print "Failure to smooth path: %s over time window: %s" % (paths[0], tw)
                raise 
                
            daily.getAttributes().setYUnits(Units.CFS)
            writedss(outfile,
                     "/CALSIM-SMOOTH/"+calsimname+"/FLOW/1DAY//" \
                     +fpart_mod+"/",
                     daily)
        else:
            raise "Failure to find CALSIM input data for: " + calsimname 
Example #24
def smooth_flow(nodes_to_smooth):
    """ A slightly smoothed version of monthly flows to avoid sharp transitions
        between months. Uses a tension spline.
    """
    calsimfile = getAttr("CALSIMFILE")
    f = opendss(calsimfile)  # open CALSIM file
    outfile = getAttr("BOUNDARYFILE")
    if not outfile or outfile == "":
        raise "Config variable BOUNDARYFILE not set and needed for prepro output"
    fpart_mod = calsim_study_fpart(modify=1)

    tw = prepro_window()

    for calsimname in nodes_to_smooth:  # Extend the list as needed, but please keep in mind the
        # limitations of the conservative spline, at least at present.
        # Mainly, input flows should be substantially greater than
        # zero at all times (yolo would be inappropriate, for instance)
        dsspath = calsim_path(calsimname)
        paths = findpath(f, dsspath)
        if not paths or len(paths) > 1:
            print "File: %s" % calsimfile
            raise "Path %s not found or not unique" % dsspath

        ref = DataReference.create(paths[0], tw)
        monthly = ref.getData()
        if monthly:
            if len(monthly) < 4:
                raise "Length of monthly data too short for smoothing. Wrong time window?"
            try:
                daily = conserve.conserveSpline(monthly, "1DAY")
            except:
                print "Failure to smooth path: %s over time window: %s" % (
                    paths[0], tw)
                raise

            daily.getAttributes().setYUnits(Units.CFS)
            writedss(outfile,
                     "/CALSIM-SMOOTH/"+calsimname+"/FLOW/1DAY//" \
                     +fpart_mod+"/",
                     daily)
        else:
            raise "Failure to find CALSIM input data for: " + calsimname
Example #25
def dccOp(infile,
          outfile,
          inpath,
          outpath,
          allThirty=1,
          value=1.0,
          tw="01OCT1974 0000 - 01OCT1991 0000"):
    """
  Converts Delta Cross Channel gate operation
  from a CALSIM file containing fraction of days open
  Args:
    infile        CALSIM dss file specifying # days operating
    outfile       output dss file readable by DSM2
    inpath        input path, e.g. /CALSIM/DXC/GATE-DAYS-OPEN//1MON//
    outpath       output path, e.g. /CALSIM/DXC/GATE//IR-YEAR/fpart/ 
    value         time series value when gate is opened (must be 1.0 or 2.0),
                  where 1.0 is used for gate ops and 2.0 is number gates operating.
    tw            time window of output
    allThirty     true if CALSIM input is hardwired to thirty day months
  """
    from vutils import timewindow
    from vdss import opendss, findpath, writedss
    from vista.time import TimeWindow
    from vista.set import DataReference
    import types
    g = opendss(infile)
    if not (type(outfile) == types.StringType):
        raise TypeError("Argument outfile should be name of a dss file")
    if not isinstance(tw, TimeWindow):
        tw = timewindow(tw)
    if not (value == 1.0 or value == 2.0):
        raise "Output time series 'on' value should be 1.0 (gate op) or 2.0 (gates operating)"
    x = findpath(g, inpath)[0]
    dxcref = DataReference.create(x, tw)
    dxc = dxcref.getData()
    if not dxc:
        raise "Cross channel data not found"
    dxcITS = daysPerMonthToITS(dxc, value, allThirty)
    writedss(outfile, outpath, dxcITS)
    return dxcITS
def calaveras():
    calsimfile = getAttr("CALSIMFILE")
    f = opendss(calsimfile)  # open CALSIM file
    outfile = getAttr("BOUNDARYFILE")
    if not outfile or outfile == "":
        raise "Config variable BOUNDARYFILE not set and needed for prepro output"
    tw = prepro_window()

    cal_path = calsim_path("C508")
    cal_ret_path = calsim_path("R514")
    cal_ref = findpath(f, cal_path)
    if not cal_ref:
        raise "Calaveras path %s not found" % cal_path
    cal_ret_ref = findpath(f, cal_ret_path)
    if not cal_ret_ref:
        raise "Calaveras return flow path %s not found" % cal_ret_path
    cal = DataReference.create(cal_ref[0], tw).getData()
    cal_ret = DataReference.create(cal_ret_ref[0], tw).getData()
    mf = calsim_study_fpart(modify=1)
    cal_in_path = calsim_path("C508_R514", mf)
    cal_in = cal + cal_ret
    writedss(outfile, cal_in_path, cal_in)
    return
Example #27
def calaveras():
    calsimfile = getAttr("CALSIMFILE")
    f = opendss(calsimfile)  # open CALSIM file
    outfile = getAttr("BOUNDARYFILE")
    if not outfile or outfile == "":
        raise "Config variable BOUNDARYFILE not set and needed for prepro output"
    tw = prepro_window()

    cal_path = calsim_path("C508")
    cal_ret_path = calsim_path("R514")
    cal_ref = findpath(f, cal_path)
    if not cal_ref:
        raise "Calaveras path %s not found" % cal_path
    cal_ret_ref = findpath(f, cal_ret_path)
    if not cal_ret_ref:
        raise "Calaveras return flow path %s not found" % cal_ret_path
    cal = DataReference.create(cal_ref[0], tw).getData()
    cal_ret = DataReference.create(cal_ret_ref[0], tw).getData()
    mf = calsim_study_fpart(modify=1)
    cal_in_path = calsim_path("C508_R514", mf)
    cal_in = cal + cal_ret
    writedss(outfile, cal_in_path, cal_in)
    return
Example #28
def project_export_limits(pulse_limit, ei_ratio,delta_inflow):
    """Refine export limits to include EI ratio and allocate
    limits to CVP and SWP.
   
      Arguments:
      pulse_limit: the raw combined export limit from CALSIM
      ei_ratio:     the maximum E/I ratio calculated by CALSIM
      delta_inflow: total inflow to the delta calculated by CALSIM
      
      Output:
      swp_limit,cvp_limit: Maximum pumping allowed during VAMP for
                           each of the individual projects. This routine
                           calculates maximum pumping, not actual pumping
    """
    if ei_ratio.getStartTime() != pulse_limit.getStartTime():
        raise ValueError("EI limit and total export limit must have same start time")

    # Limit pulse according to EI ratio
    eilimit=ei_ratio*delta_inflow
    
    # Now apply export limit. in the CALSIM file the limit probably
    # will have values only for APR and MAY, whereas the ei limit 
    # exists every month
    tsmonth=month_numbers(pulse_limit)
    is_april_may=(tsmonth==4)+(tsmonth==5)
    limit=ts_where(is_april_may *(pulse_limit < eilimit) > 0.,
                              pulse_limit,
                              eilimit)
    
    if DEBUG:
        writedss("out","/CALC/LIM/////",limit) 

    # Try to allocate to cvp and swp equally. CVP has a 
    # minimum pumping level of 800 cfs, in which case SWP takes the rest
    even_allocation=limit/2.
    # trick that converts scalar cvp min to time series with same start, interval
    cvp_min_pump=even_allocation*0. + CVP_MIN_PUMP 
    # forget about cavitation limit in the (fix CALSIM!!) case where
    # the cavitation minimum is less than total pumping limit for both
    # projects combined -- instead use the total pumping limit
    cvp_min_pump=ts_where(cvp_min_pump > limit, limit, cvp_min_pump)
    cvp_limit=ts_where(even_allocation > cvp_min_pump,
                                   even_allocation,
                                   cvp_min_pump)
    swp_limit=limit-cvp_limit
    if DEBUG:
        writedss("out","/CALC/EVENALLOC/////",even_allocation)   
        writedss("out","/CALC/CVPLIM/////",cvp_limit)
        writedss("out","/CALC/SWPLIM/////",swp_limit)
        writedss("out","/CALC/PULSELIM/////",pulse_limit)
        writedss("out","/CALC/EILIM/////",eilimit)
    return swp_limit,cvp_limit
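A hedged usage sketch for project_export_limits, assuming pulse_limit, ei_ratio, and delta_inflow are monthly CALSIM time series retrieved elsewhere (for example via findpath and DataReference.create as in the other examples).

swp_limit, cvp_limit = project_export_limits(pulse_limit, ei_ratio, delta_inflow)
# The two results are maximum allowable VAMP-period pumping per project, not actual exports.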
Example #29
import sys
Example #30
        print "IN prep DOC"
        infile = sys.argv[1]
        config.setConfigVars(infile)
        tw = prepro_window()
        print "Expanding seasonal DICU DOC drainage values"
        expand_seasonal_bst.prep_dicu(
            config.getAttr('DICUFILE_DOC'),  # original DICU DSS file for EC
            config.getAttr(
                'DICUFILE_DOCE'
            ),  # processed DICU DSS file (will be input for DSM2)
            "DRAIN-DOC",
            tw)
        print "Expanding seasonal boundary DOC values"
#        expand_seasonal_bst.prep_dicu(
#            config.getAttr('TSFILE_DOC'),        # original DICU DSS file for EC
#            config.getAttr('DICUFILE_DOCE'),       # processed DICU DSS file (will be input for DSM2)
#            "DOC",tw)

    f = opendss(config.getAttr("TSFILE_DOC"))  # open DOC boundary file
    outfile = config.getAttr("BOUNDARYFILE")
    f2 = find(f, 'DOC', 'c')
    for ref in f2:
        path = ref.getPathname()
        daily = vutils.interpolate(ref.getData(), "1DAY")
        path.setPart(Pathname.E_PART, '1DAY')
        path.setPart(Pathname.F_PART, config.getAttr('DSM2MODIFIER'))
        mod_path = path.toString()
        writedss(outfile, mod_path, daily)

    sys.exit()
Example #31
def planning_ec_mtz(): # MTZ = RSAC054 BC for the qual
    DEBUG = 0
    OUTPUT=config.getAttr('QUALBOUNDARYFILE')
    calsimfile = config.getAttr('CALSIMFILE')
    vamp_corrected_dss = config.getAttr('CALSIM_VAMP')
    CALSIM=opendss(calsimfile)
    PLANNINGTIDE=opendss(config.getAttr('STAGE_SOURCE_FILE'))
    STEP=string.lower(config.getAttr('CALSIMSTEP'))
    SJR_PROCESS=config.getAttr("SJR_PROCESS")    
    outputpath="/FILL+CHAN/RSAC054/EC//15MIN/"+config.getAttr("DSM2MODIFIER")+"/"
    if not(OUTPUT and os.path.exists(OUTPUT)):
        raise "Envvar QUALBOUNDARYFILE must exist as destination for EC"
        
    startyr=int(config.getAttr('START_DATE')[5:])
    endyr=int(config.getAttr('END_DATE')[5:])
    
    if (startyr < 1974 and endyr > 1991):
#        blocks= [ "01NOV1921 0000 - 01OCT1940 0000",
        blocks= [ "01FEB1921 0000 - 01OCT1940 0000",
             "01OCT1940 0000 - 01OCT1960 0000",
             "01OCT1960 0000 - 01OCT1974 0000",
             "01OCT1974 0000 - 01OCT1991 0000",
             "01OCT1991 0000 - 01OCT2003 0000"
                ]
    else: 
        blocks = [ "01OCT1974 0000 - 01OCT1991 0000" ]
                                                      # for memory reasons (year 2001).

    g0=5000.                                          # initial value of g (antecedent outflow) for the beginning
                                                      # of the first year. This is pretty arbitrary and makes little difference
    if DEBUG:
        g0_no_vamp = 5000.

    for twstr in blocks:    
        TWIND=timewindow(twstr)        # Actual period to be estimated
        print "Calculating boundary salinity for the period "+TWIND.toString()
        TWINDBUF=grow_window(TWIND,"1MON","1MON")     # Conservative buffered period for retrieval
                                                      # so that after preliminary operations (e.g. time average)
                                                      # time series will still span at least TWIND
        fpart=calsim_study_fpart(modify=0)
        ndo=DataReference.create(findpath(CALSIM,"/CALSIM/NDO/FLOW-NDO//"+STEP+"/"
                                  +fpart+"/")[0],TWIND).getData()
        ndo15=conserve.conserveSpline(ndo,"15MIN")
        ndo15_no_vamp = 0
        if DEBUG:
            ndo15_no_vamp = ndo15
# removed since currently there is no difference between vamp and monthly average.
            # calc  vamp caused ndo change
#        if (SJR_PROCESS.upper()=="SINGLE_STEP") or (SJR_PROCESS.upper()=="MULTI_STEP"):
#            fpart_modified=calsim_study_fpart(modify=1)
#            delta_ndo = vamp_ndo.calc_vamp_delta_ndo(calsimfile,vamp_corrected_dss,fpart,fpart_modified,SJR_PROCESS)
#            ndo15 = ndo15 + interpolate(delta_ndo, "15MIN")
		
        astro_stage_version = config.getAttr("ASTRO_STAGE_VERSION")
        mtzastro=DataReference.create(findpath(PLANNINGTIDE,"/FILL\+CHAN/RSAC054/STAGE//15MIN/"+astro_stage_version + "/")[0],TWINDBUF).getData()

        astrorms=godin((mtzastro*mtzastro)**0.5)           # RMS energy of tide (used to predict filling and draining)
        dastrorms=(  (astrorms-(astrorms>>1))*96. ).createSlice(TWIND)    
        fifteenflo2=ndo15  - 40000*(dastrorms)

        # call to ec estimator. all parameters are included. g0 is the antecedent outflow carried over from the previous block.
        [mtzecest, g1]=ec_boundary.ECEst(mtzastro,fifteenflo2,beta=600,npow1=0.75,npow2=1,g0=g0,zrms=astrorms)
       
        if DEBUG:
            fifteenflo2_no_vamp = ndo15_no_vamp  - 40000*(dastrorms)
            [mtzecest_no_vamp, g1_no_vamp]=ec_boundary.ECEst(mtzastro,fifteenflo2_no_vamp,beta=600,npow1=0.75,npow2=1,g0=g0_no_vamp,zrms=astrorms)
            g0_no_vamp = g1_no_vamp
            writedss("out_ec_check","/CALC/ndo_no_vamp/ndo////", ndo15_no_vamp)
            writedss("out_ec_check","/CALC/ndo_with_vamp/ndo////", ndo15)
            writedss("out_ec_check","/CALC/ndo_no_vamp/ec////", mtzecest_no_vamp)
            writedss("out_ec_check","/CALC/ndo_with_vamp/ec////", mtzecest)
        writedss(OUTPUT,outputpath,mtzecest)

        g0=g1
    return 0
    else:
        print "IN prep DOC"
        infile = sys.argv[1]
        config.setConfigVars(infile)
        tw=prepro_window()
        print "Expanding seasonal DICU DOC drainage values"
        expand_seasonal_bst.prep_dicu(
            config.getAttr('DICUFILE_DOC'),        # original DICU DSS file for EC
            config.getAttr('DICUFILE_DOCE'),       # processed DICU DSS file (will be input for DSM2)
            "DRAIN-DOC",tw)
        print "Expanding seasonal boundary DOC values"
#        expand_seasonal_bst.prep_dicu(
#            config.getAttr('TSFILE_DOC'),        # original DICU DSS file for EC
#            config.getAttr('DICUFILE_DOCE'),       # processed DICU DSS file (will be input for DSM2)
#            "DOC",tw)
    
    f=opendss(config.getAttr("TSFILE_DOC"))           # open DOC boundary file
    outfile=config.getAttr("BOUNDARYFILE")
    f2=find(f,'DOC','c')
    for ref in f2:
        path = ref.getPathname()
        daily = vutils.interpolate(ref.getData(),"1DAY")
        path.setPart(Pathname.E_PART, '1DAY')
        path.setPart(Pathname.F_PART, config.getAttr('DSM2MODIFIER'))
        mod_path = path.toString()
        writedss(outfile, mod_path, daily)

    sys.exit()


Example #33
def dss_store_ts(file, path, ts):
    writedss(file, path, ts)
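A hedged usage sketch for dss_store_ts; the file and path are placeholders, and ts stands for any VISTA time series such as the RegularTimeSeries built in extend_flow above.

dss_store_ts("output.dss", "/CALSIM/C169/FLOW//1MON/STUDY/", ts)   # ts built elsewhere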
Example #34
def compare_dss_files(file1,
                      file2,
                      showPlot=False,
                      outputFile=None,
                      outputPathFile=None):
    """
    Compares the two files, reports the total absolute difference for each path that differs, and lists pathnames missing from either file.
    """
    g1 = vutils.opendss(file1)
    g2 = vutils.opendss(file2)
    print 'Comparing %s to %s' % (file1, file2)
    print '%12s\t%32s' % ('DIFFERENCE', 'PATHNAME')
    if outputPathFile:
        opf_handle = open(outputPathFile, 'wb')
        opf = csv.writer(opf_handle,
                         delimiter=",",
                         quotechar='"',
                         quoting=csv.QUOTE_MINIMAL)
        opf.writerow([file1, file2])
    no_diff = True
    for ref1 in g1:
        p1 = ref1.pathname
        found = False
        for ref2 in g2:
            p2 = ref2.pathname
            if matches(p1, p2):
                found = True
                diff = ref2.data - ref1.data
                absdiff = diff.createSlice(diff.getTimeWindow())
                vtimeseries.apply(absdiff, math.fabs)
                diff_total = vtimeseries.total(absdiff)
                if (diff_total > 1e-06):
                    no_diff = False
                    if showPlot: plot(ref1.data, ref2.data)
                    print '%10.2f\t%32s' % (diff_total, p1)
                    if outputFile:
                        diffp = set_part(
                            p1, 'DIFF-%s-%s' %
                            (os.path.basename(file1), os.path.basename(file2)),
                            Pathname.A_PART)
                        writedss(outputFile, str(diffp), diff)
                    if outputPathFile:
                        opf.writerow([p1, p1, diff_total])
                break
        if (not found):
            no_diff = False
            print 'No matching path: %s in file %s NOT found in file %s' % (
                p1, file1, file2)
            if outputPathFile: opf.writerow([p1, "", "M"])
    for ref2 in g2:
        p2 = ref2.pathname
        found = False
        for ref1 in g1:
            p1 = ref1.pathname
            if matches(p1, p2):
                found = True
                break
        if (not found):
            no_diff = False
            print 'No matching path: %s in file %s NOT found in file %s' % (
                p2, file2, file1)
            if outputPathFile: opf.writerow(["", p2, "M"])
    if no_diff:
        print 'NO DIFFERENCE ACROSS ENTIRE FILEs %s and %s' % (file1, file2)
    if outputPathFile: opf_handle.close()
def doall(dicufile,bndryfile):
    sum=net_dicu(dicufile,[335,336,337,338,339,340,341,342])
    pathname=sum.getPathname().toString()
    pathname=set_part(pathname,'NET_DICU_RSAC155_TO_RSAC128',Pathname.B_PART)
    pathname=set_part(pathname,'FLOW',Pathname.C_PART)
    writedss(dicufile,pathname,sum.getData())
Example #36
    config_file=sys.argv[1]
    from ConfigParser import ConfigParser
    config=ConfigParser()
    config.read(config_file)
    tidefile=config.get('default','tidefile')
    cranges_raw=config.get('default','channel_ranges')
    try:
        twstr=config.get('default','timewindow');
    except:
        twstr=None
    try:
        reservoir_names=config.get('default','reservoir_names')
        reservoir_names=string.split(reservoir_names,',')
    except:
        reservoir_names=None
    print 'Calculating volume from tidefile: %s'%tidefile
    print 'Channel ranges: %s' % cranges_raw
    #channel_ranges=[(54,105),(183,203),(125,145),(204,225),(217,231),(233,235),(252,257)]
    #channel_ranges=[(54,82),(84,105),(183,203),(125,145),(204,214),(216,231),(233,235),(252,259)]
    channel_ranges=eval(cranges_raw)
    volumes=get_volumes_data(tidefile, channel_ranges, twstr)
    if reservoir_names != None:
        reservoir_volumes=get_reservoir_volumes_data(tidefile,reservoir_names,twstr)
        volumes.extend(reservoir_volumes)
    total_volume=total(volumes)
    outdssfile=config.get('default','output_dss_file')
    outdsspath=config.get('default','output_dss_path')
    print 'Writing out to %s as %s'%(outdssfile,outdsspath)
    writedss(outdssfile,outdsspath,total_volume)
    exit(0)
# 
Example #37
def project_export_limits(pulse_limit, ei_ratio, delta_inflow):
    """Refine export limits to include EI ratio and allocate
    limits to CVP and SWP.
   
      Arguments:
      pulse_limit: the raw combined export limit from CALSIM
      ei_ratio:     the maximum E/I ratio calculated by CALSIM
      delta_inflow: total inflow to the delta calculated by CALSIM
      
      Output:
      swp_limit,cvp_limit: Maximum pumping allowed during VAMP for
                           each of the individual projects. This routine
                           calculates maximum pumping, not actual pumping
    """
    if ei_ratio.getStartTime() != pulse_limit.getStartTime():
        raise ValueError(
            "EI limit and total export limit must have same start time")

    # Limit pulse according to EI ratio
    eilimit = ei_ratio * delta_inflow

    # Now apply export limit. in the CALSIM file the limit probably
    # will have values only for APR and MAY, whereas the ei limit
    # exists every month
    tsmonth = month_numbers(pulse_limit)
    is_april_may = (tsmonth == 4) + (tsmonth == 5)
    limit = ts_where(is_april_may * (pulse_limit < eilimit) > 0., pulse_limit,
                     eilimit)

    if DEBUG:
        writedss("out", "/CALC/LIM/////", limit)

    # Try to allocate to cvp and swp equally. CVP has a
    # minimum pumping level of 800 cfs, in which case SWP takes the rest
    even_allocation = limit / 2.
    # trick that converts scalar cvp min to time series with same start, interval
    cvp_min_pump = even_allocation * 0. + CVP_MIN_PUMP
    # forget about cavitation limit in the (fix CALSIM!!) case where
    # the cavitation minimum is less than total pumping limit for both
    # projects combined -- instead use the total pumping limit
    cvp_min_pump = ts_where(cvp_min_pump > limit, limit, cvp_min_pump)
    cvp_limit = ts_where(even_allocation > cvp_min_pump, even_allocation,
                         cvp_min_pump)
    swp_limit = limit - cvp_limit
    if DEBUG:
        writedss("out", "/CALC/EVENALLOC/////", even_allocation)
        writedss("out", "/CALC/CVPLIM/////", cvp_limit)
        writedss("out", "/CALC/SWPLIM/////", swp_limit)
        writedss("out", "/CALC/PULSELIM/////", pulse_limit)
        writedss("out", "/CALC/EILIM/////", eilimit)
    return swp_limit, cvp_limit
Example #38
def calculate_exports(limit, average_value):
    """Determines pulse and non-pulse export flows
      for cvp or swp given the refined limits on the
      export and the average for one of the projects.
      This routine assures that the limit is
      only used if it will reduce pumping 
      and that the pulse and non-pulse flows combine to 
      give the correct total monthly average pumping.
      
      Arguments:
      limit: time series of (refined) limits 
                     on exports for the project
                     (cvp or swp) being analyzed.
      average_value: time series of monthly average pumping 
                     for the project
      
      Output:
      export_value:  Time series of actual exports.
      
    """
    total_time_in_month,pulse_time_in_month,non_pulse_time_in_month = \
      calculate_vamp_times(limit)

    # Calculate a volumetrically correct non-pulse flow given that average_value gives
    # the total volume of pumping for the month and that pulse pumping is at the limit
    limit_volume = limit * pulse_time_in_month
    total_volume = average_value * total_time_in_month
    non_pulse_volume = total_volume - limit_volume
    non_pulse_flow = non_pulse_volume / non_pulse_time_in_month
    volume_corrected_limit = replace_vamp(
        limit,  # replace shoulder first
        non_pulse_flow,
        include_shoulder=1)
    volume_corrected_limit = replace_vamp(
        volume_corrected_limit,  #now correct pulse period
        limit,
        include_shoulder=0)

    # Create an indicator time series to show months where the  pulse pumping
    # limit is greater than average (the  pulse pumping limit should be a curtailment).
    # Note that this is converted to daily by "spreading it out" over the days, but it
    # is an indicator of a monthly condition and every day has the same value.
    limit_exceeds_average = limit > average_value
    limit_exceeds_average = interpolate(limit_exceeds_average, '1DAY')
    average_value = interpolate(average_value, '1DAY')
    # Now use average_value for months where the pulse limit would lead to an increase
    # and the volume-corrected pulse/non-pulse combination otherwise.
    export_value = ts_where(limit_exceeds_average, average_value,
                            volume_corrected_limit)
    if (DEBUG):
        writedss("out.dss", "/EXP/CVP/EXPORT////", export_value)
        writedss("out.dss", "/EXP/CVP/VCL////", volume_corrected_limit)
        writedss("out.dss", "/EXP/CVP/NONPULSE////",
                 interpolate(non_pulse_flow, '1DAY'))
        writedss("out.dss", "/EXP/CVP/AVE////", average_value)
        writedss("out.dss", "/EXP/CVP/LIM////", interpolate(limit, '1DAY'))
        writedss("out.dss", "/EXP/CVP/LIM_EXCEED_AVE////",
                 limit_exceeds_average)

    export_value.getAttributes().setYUnits("CFS")
    export_value.getAttributes().setYType("PER-AVER")

    return export_value
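A hedged usage sketch for calculate_exports, assuming cvp_limit comes from project_export_limits and cvp_average is the project's monthly average pumping from CALSIM; the output file and path are placeholders.

cvp_exports = calculate_exports(cvp_limit, cvp_average)
writedss("exports.dss", "/EXP/CVP/EXPORT////", cvp_exports)   # hypothetical output file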
Example #39
def read_ts(filename):
    # Read a USGS RDB-format EC file and return an irregular time series.
    fh = open(filename)
    line = fh.readline()
    while not line.startswith("DATE"):
        line = fh.readline()
    line = fh.readline()
    line = fh.readline()
    tf = TimeFactory.getInstance()
    timeinterval = "IR-DAY"
    vals = []
    tvals = []
    ncount = 0
    while line and line.strip() != "":
        fields = line.split()
        if len(fields) < 4:
            print "No data in line #%d: %s" % (ncount, line)
            break
        else:
            date, time = fields[0:2]
            vtime = tf.createTime(date + " " + time, "MM/dd/yyyy HH:mm:ss")
            tvals.append(vtime.create(vtime))
            vals.append(float(fields[3]))
        ncount = ncount + 1
        line = fh.readline()
    fh.close()
    attr = DataSetAttr(DataType.IRREGULAR_TIME_SERIES, "", "UMHOS/CM", "TIME", "INST-VAL")
    return IrregularTimeSeries("TIME SERIES", tvals, vals, None, attr)


if __name__ == "__main__":
    ts = read_ts("d:/data/usgs_ec/11455780.uv.95.lower.rdb")
    writedss("D:/data/usgs_ec/USGS_ECstations.dss", "/USGS/BENICIA/EC//IR-DAY/11455780-lower/", ts)
Example #40
        else:
            tw = prepro_window("16yr")
        tws = str(tw)
        print "Using time window: %s (dcc processing may exceed your run dates)" % tws
        
        dcc.dccOp(
            getAttr('CALSIMFILE'),              # CALSIM DSS file (input for DSM2)
            getAttr('GATEFILE'),                # processed gate DSS file (will be input for DSM2)
            '/CALSIM/DXC/GATE-DAYS-OPEN//1MON//' + getAttr('CALSIMSTUDY') + '/', # CALSIM DXC pathname
            '/CALSIM-PROCESSED/DCC/OP//IR-YEAR/' +   \
            getAttr('CALSIMSTUDY') + '/', # processed cross channel pathname
            0,                                  # 0: CALSIM input is hardwired to 30-day months
            1,                                  # operate gate between 0 & 1
            tws                                 # time window
            )

        print 'Copying gate ops for Clifton Court'
        path='/PLANNING\+GATE/CHWST000/OP-FROM-NODE//IR-YEAR/%s/' \
              % getAttr("CLIFTONCT_GATEOP")
        f=vdss.opendss(getAttr('CLIFTONCT_GATEFILE')) 
        g=vdss.findpath(f,path)
        if ( not g or len(g) != 1):
            raise "Path not found or not unique: %s" % (path)
        ts=DataReference.create(g[0]).getData()
        vdss.writedss(getAttr('GATEFILE'),path.replace("\\",""),ts)
        print "Finished with clifton court transfer"
        sys.exit(0)
#


Example #41
def calculate_exports(limit,average_value):
    """Determines pulse and non-pulse export flows
      for cvp or swp given the refined limits on the
      export and the average for one of the projects.
      This routine assures that the limit is
      only used if it will reduce pumping 
      and that the pulse and non-pulse flows combine to 
      give the correct total monthly average pumping.
      
      Arguments:
      limit: time series of (refined) limits 
                     on exports for the project
                     (cvp or swp) being analyzed.
      average_value: time series of monthly average pumping 
                     for the project
      
      Output:
      export_value:  Time series of actual exports.
      
    """
    total_time_in_month,pulse_time_in_month,non_pulse_time_in_month = \
      calculate_vamp_times(limit)

    # Calculate a volumetrically correct non-pulse flow given that average_value gives 
    # the total volume of pumping for the month and that pulse pumping is at the limit
    limit_volume=limit*pulse_time_in_month
    total_volume = average_value*total_time_in_month
    non_pulse_volume = total_volume - limit_volume
    non_pulse_flow=non_pulse_volume/non_pulse_time_in_month
    volume_corrected_limit = replace_vamp(limit, # replace shoulder first
                                                     non_pulse_flow,  
                                                     include_shoulder=1)  
    volume_corrected_limit=replace_vamp(volume_corrected_limit, #now correct pulse period
                                                              limit,
                                                              include_shoulder=0)
    
   
    # Create an indicator time series to show months where the  pulse pumping 
    # limit is greater than average (the  pulse pumping limit should be a curtailment).
    # Note that this is converted to daily by "spreading it out" over the days, but it
    # is an indicator of a monthly condition and every day has the same value.
    limit_exceeds_average = limit > average_value
    limit_exceeds_average=interpolate(limit_exceeds_average,'1DAY')
    average_value=interpolate(average_value,'1DAY')
    # Now use average_value for months where the pulse limit would lead to an increase
    # and the volume-corrected pulse/non-pulse combination otherwise.
    export_value = ts_where(limit_exceeds_average,average_value, volume_corrected_limit)
    if (DEBUG):
        writedss("out.dss","/EXP/CVP/EXPORT////",export_value)        
        writedss("out.dss","/EXP/CVP/VCL////",volume_corrected_limit)
        writedss("out.dss","/EXP/CVP/NONPULSE////",interpolate(non_pulse_flow,'1DAY'))    
        writedss("out.dss","/EXP/CVP/AVE////",average_value)
        writedss("out.dss","/EXP/CVP/LIM////",interpolate(limit,'1DAY'))
        writedss("out.dss","/EXP/CVP/LIM_EXCEED_AVE////",limit_exceeds_average)
	
    export_value.getAttributes().setYUnits("CFS")
    export_value.getAttributes().setYType("PER-AVER")
    
    return export_value
Example #42
def dss_store_ts(file,path,ts):
    writedss(file,path,ts)
Example #43
        if int(sdate[5:]) < 1974:
            tw = prepro_window("82yr")
        else:
            tw = prepro_window("16yr")
        tws = str(tw)
        print "Using time window: %s (dcc processing may exceed your run dates)" % tws

        dcc.dccOp(
            getAttr('CALSIMFILE'),              # CALSIM DSS file (input for DSM2)
            getAttr('GATEFILE'),                # processed gate DSS file (will be input for DSM2)
            '/CALSIM/DXC/GATE-DAYS-OPEN//1MON//' + getAttr('CALSIMSTUDY') + '/', # CALSIM DXC pathname
            '/CALSIM-PROCESSED/DCC/OP//IR-YEAR/' +   \
            getAttr('CALSIMSTUDY') + '/', # processed cross channel pathname
            0,                                  # 0: CALSIM input is hardwired to 30-day months
            1,                                  # operate gate between 0 & 1
            tws                                 # time window
            )

        print 'Copying gate ops for Clifton Court'
        path='/PLANNING\+GATE/CHWST000/OP-FROM-NODE//IR-YEAR/%s/' \
              % getAttr("CLIFTONCT_GATEOP")
        f = vdss.opendss(getAttr('CLIFTONCT_GATEFILE'))
        g = vdss.findpath(f, path)
        if (not g or len(g) != 1):
            raise "Path not found or not unique: %s" % (path)
        ts = DataReference.create(g[0]).getData()
        vdss.writedss(getAttr('GATEFILE'), path.replace("\\", ""), ts)
        print "Finished with clifton court transfer"
        sys.exit(0)
#
def smooth_flow2():
    """ A slightly smoothed version of monthly flows to avoid sharp transitions
        between months. Uses a tension spline.
    """
    calsimfile = getAttr("CALSIMFILE")
    f = opendss(calsimfile)  # open CALSIM file

    outfile = getAttr("BOUNDARYFILE")
    if not outfile or outfile == "":
        raise "Config variable BOUNDARYFILE not set and needed for prepro output"
    fpart_mod = calsim_study_fpart(modify=1)

    tw = prepro_window()

    print "Preparing SAC and SJR Inflows"

    sjr_path = calsim_path("C639")
    sjr_ret_path = calsim_path("R644")
    sjr_ref = findpath(f, sjr_path)
    if not sjr_ref:
        raise "San Joaquin path %s not found" % sjr_path
    sjr_ret_ref = findpath(f, sjr_ret_path)
    if not sjr_ret_ref:
        raise "San Joaquin return flow path %s not found" % sjr_ret_path
    sjr = DataReference.create(sjr_ref[0], tw)  # .getData()
    sjr_ret = DataReference.create(sjr_ret_ref[0], tw)  # .getData()
    sjr_in = sjr + sjr_ret

    sac_path = calsim_path("C169")
    frwa_div1_path = calsim_path("D168B")
    frwa_div2_path = calsim_path("D168C")
    sac_ref = findpath(f, sac_path)
    if not sac_ref:
        raise "Sacramento path %s not found" % sac_path
    frwa_div1_ref = findpath(f, frwa_div1_path)
    if not frwa_div1_ref:
        raise "Freeport Regional Water Authority Diversion path %s not found" % frwa_div1_path
    frwa_div2_ref = findpath(f, frwa_div2_path)
    if not frwa_div2_ref:
        raise "Freeport Regional Water Authority Diversion path %s not found" % frwa_div2_path
    sac = DataReference.create(sac_ref[0], tw)  # .getData()
    frwa_div1 = DataReference.create(frwa_div1_ref[0], tw)  # .getData()
    frwa_div2 = DataReference.create(frwa_div2_ref[0], tw)  # .getData()
    sac_in = sac + frwa_div1 + frwa_div2

    #    sjr_sac(sjr_inf,sac_inf)

    print "smoothing SAC"
    sac_mon = sac_in.getData()
    if sac_mon:
        if len(sac_mon) < 4:
            raise "Length of monthly data too short for smoothing. Wrong time window?"
        try:
            sac_day = conserve.conserveSpline(sac_mon, "1DAY")
        except:
            print "Failure to smooth path: %s over time window: %s" % ("C169_D168B_D168C", tw)
            raise

        sac_day.getAttributes().setYUnits(Units.CFS)
        writedss(outfile, "/CALSIM-SMOOTH/C169_D168B_D168C/FLOW/1DAY//" + fpart_mod + "/", sac_day)
    else:
        raise "Failure to find monthly data for C169+D168B+D168C"

    print "smoothing SJR"
    sjr_mon = sjr_in.getData()
    if sjr_mon:
        if len(sjr_mon) < 4:
            raise "Length of monthly data too short for smoothing. Wrong time window?"
        try:
            sjr_day = conserve.conserveSpline(sjr_mon, "1DAY")
        except:
            print "Failure to smooth path: %s over time window: %s" % ("C639_R644", tw)
            raise

        sjr_day.getAttributes().setYUnits(Units.CFS)
        writedss(outfile, "/CALSIM-SMOOTH/C639_R644/FLOW/1DAY//" + fpart_mod + "/", sjr_day)
    else:
        raise "Failure to find monthly data for C639+R644"
def planning_ec_mtz(): # MTZ = RSAC054 BC for the qual
    DEBUG = 0
    OUTPUT=config.getAttr('QUALBOUNDARYFILE')
    calsimfile = config.getAttr('CALSIMFILE')
    vamp_corrected_dss = config.getAttr('CALSIM_VAMP')
    CALSIM=opendss(calsimfile)
    PLANNINGTIDE=opendss(config.getAttr('STAGE_SOURCE_FILE'))
    STEP=string.lower(config.getAttr('CALSIMSTEP'))
    SJR_PROCESS=config.getAttr("SJR_PROCESS")    
    outputpath="/FILL+CHAN/RSAC054/EC//15MIN/"+config.getAttr("DSM2MODIFIER")+"/"
    if not(OUTPUT and os.path.exists(OUTPUT)):
        raise "Envvar QUALBOUNDARYFILE must exist as destination for EC"
        
    startyr=int(config.getAttr('START_DATE')[5:])
    endyr=int(config.getAttr('END_DATE')[5:])
    
    if (startyr < 1974 and endyr > 1991):
        blocks= [ "01NOV1921 0000 - 01OCT1940 0000",
             "01OCT1940 0000 - 01OCT1960 0000",
             "01OCT1960 0000 - 01OCT1974 0000",
             "01OCT1974 0000 - 01OCT1991 0000",
             "01OCT1991 0000 - 01OCT2003 0000"
                ]                                     # period is split into blocks for memory reasons (year 2001).
    else:
        blocks = [ "01OCT1974 0000 - 01OCT1991 0000" ]

    g0=5000.                                          # initial value of g (antecedent outflow) for the beginning
                                                      # of the first year. This is pretty arbitrary and makes little difference
    if DEBUG:
        g0_no_vamp = 5000.

    for twstr in blocks:    
        TWIND=timewindow(twstr)        # Actual period to be estimated
        print "Calculating boundary salinity for the period "+TWIND.toString()
        TWINDBUF=grow_window(TWIND,"1MON","1MON")     # Conservative buffered period for retrieval
                                                      # so that after preliminary operations (e.g. time average)
                                                      # time series will still span at least TWIND
        fpart=calsim_study_fpart(modify=0)
        ndo=DataReference.create(findpath(CALSIM,"/CALSIM/NDO/FLOW-NDO//"+STEP+"/"
                                  +fpart+"/")[0],TWIND).getData()
        ndo15=conserve.conserveSpline(ndo,"15MIN")
        ndo15_no_vamp = 0
        if DEBUG:
            ndo15_no_vamp = ndo15
        # calc  vamp caused ndo change
        if (SJR_PROCESS.upper()=="SINGLE_STEP") or (SJR_PROCESS.upper()=="MULTI_STEP"):
            fpart_modified=calsim_study_fpart(modify=1)
            delta_ndo = vamp_ndo.calc_vamp_delta_ndo(calsimfile,vamp_corrected_dss,fpart,fpart_modified,SJR_PROCESS)
            ndo15 = ndo15 + interpolate(delta_ndo, "15MIN")
		
        astro_stage_version = config.getAttr("ASTRO_STAGE_VERSION")
        mtzastro=DataReference.create(findpath(PLANNINGTIDE,"/FILL\+CHAN/RSAC054/STAGE//15MIN/"+astro_stage_version + "/")[0],TWINDBUF).getData()

        astrorms=godin((mtzastro*mtzastro)**0.5)           # RMS energy of tide (used to predict filling and draining)
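        # dastrorms below approximates the daily rate of change of the tidal RMS
        # stage ((x - x>>1) is the change over one 15-minute step, times 96 steps/day);
        # fifteenflo2 then adjusts NDO for the filling/draining that change implies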
        dastrorms=(  (astrorms-(astrorms>>1))*96. ).createSlice(TWIND)    
        fifteenflo2=ndo15  - 40000*(dastrorms)

        # call to ec estimator. all parameters are included. g0 is an initial
        # condition (antecedent outflow) carried over from the previous block.
        [mtzecest, g1]=ec_boundary.ECEst(mtzastro,fifteenflo2,beta=600,npow1=0.75,npow2=1,g0=g0,zrms=astrorms)
        
        if DEBUG:
            fifteenflo2_no_vamp = ndo15_no_vamp  - 40000*(dastrorms)
            [mtzecest_no_vamp, g1_no_vamp]=ec_boundary.ECEst(mtzastro,fifteenflo2_no_vamp,beta=600,npow1=0.75,npow2=1,g0=g0_no_vamp,zrms=astrorms)
            g0_no_vamp = g1_no_vamp
            writedss("out_ec_check","/CALC/ndo_no_vamp/ndo////", ndo15_no_vamp)
            writedss("out_ec_check","/CALC/ndo_with_vamp/ndo////", ndo15)
            writedss("out_ec_check","/CALC/ndo_no_vamp/ec////", mtzecest_no_vamp)
            writedss("out_ec_check","/CALC/ndo_with_vamp/ec////", mtzecest)
        writedss(OUTPUT,outputpath,mtzecest)
            
        g0=g1
    return 0
Example #46
0
import sys
import config
import time
import jarray,math
import vutils
from vista.time import TimeFactory, TimeInterval,Time
from vista.set import DataReference, Units, Pathname
from vdss import opendss,findpath,writedss,find
from vtimeseries import timewindow,timeinterval
from config import getAttr,setConfigVars
from calsim_study_fpart import calsim_study_fpart
from planning_time_window import prepro_window
from jarray import zeros,array
from vista.set import RegularTimeSeries,DataSetAttr,DataType,Constants
from vdisplay import plot,tabulate
from vmath import per_avg, per_max, mov_avg, godin, per_min
from vutils import Constants, RegularTimeSeries
from transfer import transfer
configfile = sys.argv[1]      # configuration file
param = sys.argv[2]       # param is the second argument and can be "VOL_FP","EC_FP","200nodes_FP"
sind = int(sys.argv[3])   # start index of DSS Records
eind = int(sys.argv[4])   # end index of DSS Records
setConfigVars(configfile)
DSSENVVAR = "QUAL_OUTDSS_%s" % param
infile  = getAttr(DSSENVVAR)
g = opendss(infile)
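# name the output after the input file, swapping its ".dss" extension for "_PostPro.DSS"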
outfile = "%s_PostPro.DSS"% infile[:-4]
print "Processing Results for..."
for n in range(sind-1,eind,1):
	ref = g[n]
Example #47
0
    from ConfigParser import ConfigParser
    config = ConfigParser()
    config.read(config_file)
    tidefile = config.get('default', 'tidefile')
    cranges_raw = config.get('default', 'channel_ranges')
    try:
        twstr = config.get('default', 'timewindow')
    except:
        twstr = None
    try:
        reservoir_names = config.get('default', 'reservoir_names')
        reservoir_names = string.split(reservoir_names, ',')
    except:
        reservoir_names = None
    print 'Calculating volume from tidefile: %s' % tidefile
    print 'Channel ranges: %s' % cranges_raw
    #channel_ranges=[(54,105),(183,203),(125,145),(204,225),(217,231),(233,235),(252,257)]
    #channel_ranges=[(54,82),(84,105),(183,203),(125,145),(204,214),(216,231),(233,235),(252,259)]
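    # parse the config string into a list of (first, last) channel-number tuples,
    # in the same form as the commented-out examples above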
    channel_ranges = eval(cranges_raw)
    volumes = get_volumes_data(tidefile, channel_ranges, twstr)
    if reservoir_names != None:
        reservoir_volumes = get_reservoir_volumes_data(tidefile,
                                                       reservoir_names, twstr)
        volumes.extend(reservoir_volumes)
    total_volume = total(volumes)
    outdssfile = config.get('default', 'output_dss_file')
    outdsspath = config.get('default', 'output_dss_path')
    print 'Writing out to %s as %s' % (outdssfile, outdsspath)
    writedss(outdssfile, outdsspath, total_volume)
    exit(0)
#
Example #48
0
def smooth_flow2():
    """ A slightly smoothed version of monthly flows to avoid sharp transitions
        between months. Uses a tension spline.
    """
    calsimfile = getAttr("CALSIMFILE")
    f = opendss(calsimfile)  # open CALSIM file

    outfile = getAttr("BOUNDARYFILE")
    if not outfile or outfile == "":
        raise "Config variable BOUNDARYFILE not set and needed for prepro output"
    fpart_mod = calsim_study_fpart(modify=1)

    tw = prepro_window()

    print "Preparing SAC and SJR Inflows"

    sjr_path = calsim_path("C639")
    sjr_ret_path = calsim_path("R644")
    sjr_ref = findpath(f, sjr_path)
    if not sjr_ref:
        raise "San Joaquin path %s not found" % sjr_path
    sjr_ret_ref = findpath(f, sjr_ret_path)
    if not sjr_ret_ref:
        raise "San Joaquin return flow path %s not found" % sjr_ret_path
    sjr = DataReference.create(sjr_ref[0], tw)  #.getData()
    sjr_ret = DataReference.create(sjr_ret_ref[0], tw)  #.getData()
    sjr_in = sjr + sjr_ret

    sac_path = calsim_path("C169")
    frwa_div1_path = calsim_path("D168B")
    frwa_div2_path = calsim_path("D168C")
    sac_ref = findpath(f, sac_path)
    if not sac_ref:
        raise "Sacramento path %s not found" % sac_path
    frwa_div1_ref = findpath(f, frwa_div1_path)
    if not frwa_div1_ref:
        raise "Freeport Regional Water Authority Diversion path %s not found" % frwa_div1_path
    frwa_div2_ref = findpath(f, frwa_div2_path)
    if not frwa_div2_ref:
        raise "Freeport Regional Water Authority Diversion path %s not found" % frwa_div2_path
    sac = DataReference.create(sac_ref[0], tw)  #.getData()
    frwa_div1 = DataReference.create(frwa_div1_ref[0], tw)  #.getData()
    frwa_div2 = DataReference.create(frwa_div2_ref[0], tw)  #.getData()
    sac_in = sac + frwa_div1 + frwa_div2

    #    sjr_sac(sjr_inf,sac_inf)

    print "smoothing SAC"
    sac_mon = sac_in.getData()
    if sac_mon:
        if len(sac_mon) < 4:
            raise "Length of monthly data too short for smoothing. Wrong time window?"
        try:
            sac_day = conserve.conserveSpline(sac_mon, "1DAY")
        except:
            print "Failure to smooth path: %s over time window: %s" % (
                "C169_D168B_D168C", tw)
            raise

        sac_day.getAttributes().setYUnits(Units.CFS)
        writedss(outfile,
                 "/CALSIM-SMOOTH/C169_D168B_D168C/FLOW/1DAY//" \
                     +fpart_mod+"/",sac_day)
    else:
        raise "Failure to find monthly data for C169+D168B+D168C"

    print "smoothing SJR"
    sjr_mon = sjr_in.getData()
    if sjr_mon:
        if len(sjr_mon) < 4:
            raise "Length of monthly data too short for smoothing. Wrong time window?"
        try:
            sjr_day = conserve.conserveSpline(sjr_mon, "1DAY")
        except:
            print "Failure to smooth path: %s over time window: %s" % (
                "C639_R644", tw)
            raise

        sjr_day.getAttributes().setYUnits(Units.CFS)
        writedss(outfile,
             "/CALSIM-SMOOTH/C639_R644/FLOW/1DAY//" \
                 +fpart_mod+"/",sjr_day)
    else:
        raise "Failure to find monthly data for C639+R644"
Example #49
0
def planning_ec_mtz():  # MTZ = RSAC054 BC for the qual
    DEBUG = 0
    OUTPUT = config.getAttr('QUALBOUNDARYFILE')
    calsimfile = config.getAttr('CALSIMFILE')
    CALSIM = opendss(calsimfile)
    PLANNINGTIDE = opendss(config.getAttr('STAGE_SOURCE_FILE'))
    outputpath = "/FILL+CHAN/RSAC054/EC//15MIN/" + config.getAttr(
        "DSM2MODIFIER") + "/"
    if not (OUTPUT and os.path.exists(OUTPUT)):
        raise "Envvar QUALBOUNDARYFILE must exist as destination for EC"

    startyr = int(config.getAttr('START_DATE')[5:])
    endyr = int(config.getAttr('END_DATE')[5:])

    if (startyr < 1974 and endyr > 1991):
        blocks = [
            "01NOV1921 0000 - 01OCT1940 0000",
            "01OCT1940 0000 - 01OCT1960 0000",
            "01OCT1960 0000 - 01OCT1974 0000",
            "01OCT1974 0000 - 01OCT1991 0000",
            "01OCT1991 0000 - 01OCT2003 0000"
        ]  # for memory reasons (year 2001).
    else:
        blocks = ["01OCT1974 0000 - 01OCT1991 0000"]

    g0 = 5000.  # initial value of g (antecedent outflow) for the beginning
    # of the first year. This is pretty arbitrary and makes little difference

    for twstr in blocks:
        TWIND = timewindow(twstr)  # Actual period to be estimated
        print "Calculating boundary salinity for the period " + TWIND.toString(
        )
        TWINDBUF = grow_window(TWIND, "1MON", "1MON")  # Conservative buffered period for retrieval
        # so that after preliminary operations (e.g. time average)
        # time series will still span at least TWIND
        fpart = calsim_study_fpart(modify=0)

        ndo15 = DataReference.create(
            findpath(CALSIM, "/CALSIM/NDO/FLOW-NDO//15MIN/" + fpart + "/")[0],
            TWIND).getData()

        astro_stage_version = config.getAttr("ASTRO_STAGE_VERSION")
        mtzastro = DataReference.create(
            findpath(
                PLANNINGTIDE, "/FILL\+CHAN/RSAC054/STAGE//15MIN/" +
                astro_stage_version + "/")[0], TWINDBUF).getData()
        if 'NAVD' in astro_stage_version:
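            # shift the stage from the NAVD datum to NGVD (2.68 ft offset here)
            # and save a copy of the converted record under the renamed F part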
            mtzastro = mtzastro - 2.68
            writedss(
                CALSIM, "/FILL\+CHAN/RSAC054/STAGE//15MIN/" +
                astro_stage_version.replace('NAVD', 'NGVD') + "/", mtzastro)

        astrorms = godin((mtzastro * mtzastro)**0.5)  # RMS energy of tide (used to predict filling and draining)
        dastrorms = ((astrorms - (astrorms >> 1)) * 96.).createSlice(TWIND)
        fifteenflo2 = ndo15 - 53411.1 * (dastrorms)

        # call to ec estimator. all parameters are included.
        so, sb = 37196, 2328.1
        c = [
            -6.00E-05, 7.30E-05, -1.00E-05, -3.00E-05, 1.70E-06, -1.00E-04,
            4.50E-05, -1.00E-04
        ]
        [mtzecest, g1] = ec_boundary.ECEst(mtzastro,
                                           fifteenflo2,
                                           so,
                                           sb,
                                           beta=420.5205,
                                           npow1=0.7750588,
                                           npow2=1,
                                           g0=g0,
                                           zrms=astrorms,
                                           c=c)

        writedss(OUTPUT, outputpath, mtzecest)

        g0 = g1
    return 0
def doall(dicufile,bndryfile):
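    # sum net Delta Island Consumptive Use over the CALSIM nodes north of Freeport
    # and write it back to the DICU file under a descriptive pathname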
    sum=net_dicu(dicufile,[334,333,332,331,330])
    pathname=sum.getPathname().toString()
    pathname=set_part(pathname,'NET_DICU_NORTH_OF_FREEPORT',Pathname.B_PART)
    pathname=set_part(pathname,'FLOW',Pathname.C_PART)
    writedss(dicufile,pathname,sum.getData())
Example #51
0
import sys
import config
import time
import jarray, math
import vutils
from vista.time import TimeFactory, TimeInterval, Time
from vista.set import DataReference, Units, Pathname
from vdss import opendss, findpath, writedss, find
from vtimeseries import timewindow, timeinterval
from config import getAttr, setConfigVars
from calsim_study_fpart import calsim_study_fpart
from planning_time_window import prepro_window
from jarray import zeros, array
from vista.set import RegularTimeSeries, DataSetAttr, DataType, Constants
from vdisplay import plot, tabulate
from vmath import per_avg, per_max, mov_avg, godin, per_min
from vutils import Constants, RegularTimeSeries
from transfer import transfer
configfile = sys.argv[1]  # configuration file
param = sys.argv[2]  # param is the second argument and can be "VOL_FP","EC_FP","200nodes_FP"
sind = int(sys.argv[3])  # start index of DSS Records
eind = int(sys.argv[4])  # end index of DSS Records
setConfigVars(configfile)
DSSENVVAR = "QUAL_OUTDSS_%s" % param
infile = getAttr(DSSENVVAR)
g = opendss(infile)
outfile = "%s_PostPro.DSS" % infile[:-4]
print "Processing Results for..."
for n in range(sind - 1, eind, 1):