def __init__(self, configString=""):
        """ Initialize by parsing a a config file  line

        Parameters
        ----------
        configString : str
           Config file line, or empty
        """

        # init to empty
        self._empty = True
        self._issue = None
        self._valid = None
        self._hrrr = False
        self._rap = False
        self._layered = False
        self._clockTime = None
        if not configString:
            return

        # parse
        self._empty = False
        self._issue = datetime.datetime.strptime(configString[0:10],
                                                 "%Y%m%d%H")
        self._valid = datetime.datetime.strptime(configString[11:23],
                                                 "%Y%m%d%H%M")
        self._hrrr = df.stringToBool(configString[24:25])
        self._rap = df.stringToBool(configString[26:27])
        self._layered = df.stringToBool(configString[28:29])
        self._clockTime = datetime.datetime.strptime(configString[30:],
                                                     "%Y-%m-%d_%H:%M:%S")
Example #2
    def initFromStateFile(self, confFile):

        # parse
        cf = SafeConfigParser()
        cf.read(confFile)
        self._step = []
        self._first = True
        self._empty = True
        status = df.stringToBool(cf.get('status', 'first'))
        if (status):
            return
        self._empty = False
        self._first = df.stringToBool(cf.get('forecast', 'first'))
        stime = cf.get('forecast', 'issue_time')
        self._issue = datetime.datetime.strptime(stime, "%Y%m%d%H")

        # note: SafeConfigParser.get's third positional argument is the
        # 'raw' flag, not a default value, so self._layered is not a fallback
        self._layered = df.stringToBool(
            cf.get('forecast', 'layered', self._layered))
        fs0 = ForecastStep(cf.get('forecast', stepStaticName(0)))
        fs1 = ForecastStep(cf.get('forecast', stepStaticName(1)))
        fs2 = ForecastStep(cf.get('forecast', stepStaticName(2)))
        self._step.append(fs0)
        self._step.append(fs1)
        self._step.append(fs2)
        stime = cf.get('forecast', 'clock_time')
        self._clockTime = datetime.datetime.strptime(stime,
                                                     '%Y-%m-%d_%H:%M:%S')
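
A sketch of the state file this method expects, inferred from the get() calls above. Section and option names are taken from the code; the step option names are whatever stepStaticName(0..2) returns (shown here as hypothetical step0..step2), and each step value is the space-separated string ForecastStep.writeConfigString produces (see a later example):

[status]
first = 0

[forecast]
first = 1
issue_time = 2015010112
layered = 0
step0 = 0 0 0 0 0 0 0
step1 = 0 0 0 0 0 0 0
step2 = 0 0 0 0 0 0 0
clock_time = 2015-01-01_12:30:00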
   def _analyzeNewest(self, dataNewest, stateNewest, hoursBack, fileType):
      if (dataNewest <= stateNewest):
         return

      # see if issue time has increased and if so, purge old stuff
      # create DataFile objects, which requires breaking the full
      # file into yymmdd/filename
      sind = stateNewest.find('/')
      if (sind < 0):
         raise FilenameMatchError('Cannot parse directory from ' + stateNewest)
      nind = dataNewest.find('/')
      if (nind < 0):
         raise FilenameMatchError('Cannot parse directory from ' + dataNewest)
      symd = stateNewest[:sind]
      sfile = stateNewest[sind+1:]
      nymd = dataNewest[:nind]
      nfile = dataNewest[nind+1:]
      WhfLog.debug("Checking %s / %s  against %s / %s", symd, sfile, nymd, nfile)
      try:
         df0 = df.DataFile(symd, sfile, fileType)
         df1 = df.DataFile(nymd, nfile, fileType)
      except FilenameMatchError as fe:
         WhfLog.debug("Cannot update due to %s", fe)
      except InvalidArgumentError as ie:
         WhfLog.debug("Cannot update due to %s", ie)
      else:
         # only use df0/df1 when both were created successfully
         if (df0._time.inputIsNewerIssueHour(df1._time)):
            WhfLog.debug("%s Issue hour has increased, purge now", fileType)
            self._update(df1._time, hoursBack, fileType)
    def __init__(self, configString=""):
        """ Initialize by parsing a a config file  line

        Parameters
        ----------
        configString : str
           Config file line, or empty
        """

        # init to empty
        self._empty = True
        self._issue = datetime.datetime
        self._valid = datetime.datetime
        self._hrrr = False
        self._rap = False
        self._layered = False
        self._clockTime = datetime.datetime
        if not configString:
            return

        # parse
        self._empty = False
        self._issue = datetime.datetime.strptime(configString[0:10],
                                                 "%Y%m%d%H")
        self._valid = datetime.datetime.strptime(configString[11:23],
                                                 "%Y%m%d%H%M")
        self._hrrr = df.stringToBool(configString[24:25])
        self._rap = df.stringToBool(configString[26:27])
        self._layered = df.stringToBool(configString[28:29])
        self._clockTime = datetime.datetime.strptime(configString[30:],
                                                     "%Y-%m-%d_%H:%M:%S")
def init(parser, logFileName, configType, action, data):
    """Initialize log file using configFile content, and a log file name

    Parameters
    ----------
    parser : SafeConfigParser
        parser that has parsed the file on entry
    logFileName : str
        Name of the log file, without a .log suffix
    configType : str
        Short, Medium, Long, AA
    action : str
        Regrid, Layer
    data : str
        HRRR, RAP, MRMS, GFS, CFS
    """

    logging_level = parser.get('log_level', 'forcing_engine_log_level')
    # Set the logging level based on what was defined in the parm/config file
    if logging_level == 'DEBUG':
        set_level = logging.DEBUG
    elif logging_level == 'INFO':
        set_level = logging.INFO
    elif logging_level == 'WARNING':
        set_level = logging.WARNING
    elif logging_level == 'ERROR':
        set_level = logging.ERROR
    else:
        set_level = logging.CRITICAL

    # log files written to configured place with yyyymmdd subdirectory
    logging_path = parser.get('log_level', 'forcing_engine_log_dir')
    if (not df.makeDirIfNeeded(logging_path)):
        raise SystemCommandError("Cannot create " + logging_path)
    logging_path += "/"
    now = datetime.datetime.utcnow()
    logging_path += now.strftime("%Y%m%d")
    if (not df.makeDirIfNeeded(logging_path)):
        raise SystemCommandError("Cannot create " + logging_path)

    # we have two log files, one for python, one for ncl
    logging_filename = logging_path + "/" + logFileName + ".log"
    ncl_logging_filename = logging_path + "/" + logFileName + ".ncl.log"
    setup_logger('main', logging_filename, set_level)
    setup_logger('ncl', ncl_logging_filename, set_level)

    # set the global var's to inputs, padded to correct length
    #(so logging lines up nice)
    global WhfConfigType
    WhfConfigType = configType
    WhfConfigType = WhfConfigType.ljust(WhfConfigTypeLen)

    global WhfAction
    WhfAction = action
    WhfAction = WhfAction.ljust(WhfActionLen)

    global WhfData
    WhfData = data
    WhfData = WhfData.ljust(WhfDataLen)
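
A hedged usage sketch: the caller hands in an already-parsed SafeConfigParser plus the strings documented above (the config file name below is illustrative only):

from ConfigParser import SafeConfigParser  # Python 2, as in these examples

parser = SafeConfigParser()
parser.read('wrf_hydro_forcing.parm')      # hypothetical file name
init(parser, 'RegridHRRR', 'Short', 'Regrid', 'HRRR')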
Example #8
def init(parser, which, initAll):
    """Initialize log file using configFile content, and a log file name

    Parameters
    ----------
    parser : SafeConfigParser
        parser that has parsed the file on entry
    logFileName : str
        Name of the log file, without a .log suffix
    configType : str
        Short, Medium, Long, AA
    action : str
        Regrid, Layer
    data : str
        HRRR, RAP, MRMS, GFS, CFS
    """

    logging_level = parser.get('log_level', 'forcing_engine_log_level')
    # Set the logging level based on what was defined in the parm/config file
    if logging_level == 'DEBUG':
        set_level = logging.DEBUG
    elif logging_level == 'INFO':
        set_level = logging.INFO
    elif logging_level == 'WARNING':
        set_level = logging.WARNING
    elif logging_level == 'ERROR':
        set_level = logging.ERROR
    else:
        set_level = logging.CRITICAL

    # log files written to configured place with yyyymmdd subdirectory
    logging_path = parser.get('log_level', 'forcing_engine_log_dir')
    if (not df.makeDirIfNeeded(logging_path)):
        raise SystemCommandError("Cannot create " + logging_path)
    logging_path += "/"
    now = datetime.datetime.utcnow()
    logging_path += now.strftime("%Y%m%d")
    if (not df.makeDirIfNeeded(logging_path)):
        raise SystemCommandError("Cannot create " + logging_path)

    # we have two log files, one for python, one for ncl, for each of the cases
    # string 'RegridHRRR', 'RegridRAP', 'RegridMRMS', 'RegridGFS', 'ShortLayer', 'AaLayer', 'LongRegrid'
    global WhfWhichChoices
    for choice in WhfWhichChoices:
        if (initAll):
            logging_filename =  logging_path + "/" + choice + ".log" 
            ncl_logging_filename =  logging_path + "/" + choice + ".ncl.log" 
            setup_logger(choice + 'main',  logging_filename, set_level)
            setup_logger(choice + 'ncl',  ncl_logging_filename, set_level)
        else:
            if (choice == which):
                logging_filename =  logging_path + "/" + choice + ".log" 
                ncl_logging_filename =  logging_path + "/" + choice + ".ncl.log" 
                setup_logger(choice + 'main',  logging_filename, set_level)
                setup_logger(choice + 'ncl',  ncl_logging_filename, set_level)
    set(which)
 def debugPrint(self):
     """ WhfLog debug of content
     """
     WhfLog.debug("Fcst: empty=%d", int(self._empty))
     if (self._empty):
         return
     WhfLog.debug("Fcst: I:%s V:%s hrrr:%d rap:%d layered:%d clockTime=%s",
                   self._issue.strftime("%Y%m%d%H"),
                   self._valid.strftime("%Y%m%d%H"), df.boolToInt(self._hrrr),
                   df.boolToInt(self._rap), df.boolToInt(self._layered),
                   self._clockTime.strftime("%Y-%m-%d_%H:%M:%S"))
def main(argv):

    # User must pass the config file into the main driver.
    configFile = argv[0]
    if not os.path.exists(configFile):
        print 'ERROR forcing engine config file not found.'
        return 1
    # read in fixed main params
    parms = parmRead(configFile)

    # query each directory to get newest thing, and update overall newest
    #WhfLog.debug("Looking in %s and %s", parms._hrrrDir, parms._rapDir)
    newestT = df.newestIssueTime(parms._hrrrDir)
    newestT2 = df.newestIssueTime(parms._rapDir)
    if (not newestT) and (not newestT2):
        WhfLog.debug("NO INPUT DATA available")
        return 0
    
    if (newestT2 > newestT):
        newestT = newestT2
    #WhfLog.debug("Newest issue time = %s", newestT)

    # if there is not a state file, create one now using newest
    if (not os.path.exists(parms._stateFile)):
        state = State()
        WhfLog.info("Initializing")
        state.initialSetup(parms)
        state.initialize(parms, newestT)
        state.write(parms._stateFile)

    # Normal processing situation
    #WhfLog.debug("Look for Layering....")
    
    # read in state
    state2 = State()
    state2.initFromStateFile(parms._stateFile)
    if state2.isEmpty():
        # error return here
        return 0
    
    # check for new issue time
    if (state2.isNewModelIssueTime(newestT)):
        WhfLog.info("Re-Initializing state, new model issue time %s", newestT)
        state2.initialize(parms, newestT)

    # update availability
    state2.setCurrentModelAvailability(parms)

    # layer if appropriate
    state2.layerIfReady(parms, configFile)

    # write out final state
    state2.write(parms._stateFile)
    return 0
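
A typical command-line entry point for a driver like this, assuming main() receives the argument list without the program name:

if __name__ == "__main__":
    import sys
    sys.exit(main(sys.argv[1:]))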
    def updateWithNew(self, data, hoursBack):
        """ Update internal state with new data

      The dataType is used to determine which part of state to update

      Parameters
      ----------
      data: DataFiles
         The newest data
      hoursBack: int
         Maximum number of hours back to keep data in state

      Returns
      -------
      list[str]
          The data file names that are are newly added to state
      """

        ret = []
        fnames = data.getFnames()
        if (not fnames):
            return ret

        if (self.isEmpty()):
            WhfLog.debug("Adding to empty %s list")
        else:
            sname = self.newest()
            if (not sname):
                WhfLog.error("Expected file, got none")
                return ret
            if (fnames[-1] > sname):
                WhfLog.debug("Newer time encountered")
                # see if issue time has increased and if so, purge old stuff
                # create DataFile objects
                try:
                    df0 = df.DataFile(sname[0:8], sname[9:], 'CFS')
                    df1 = df.DataFile(fnames[-1][0:8], fnames[-1][9:], 'CFS')
                except FilenameMatchError as fe:
                    WhfLog.debug("Skipping file use due to %s", fe)
                except InvalidArgumentError as ie:
                    WhfLog.debug("Skipping file use due to %s", ie)
                else:
                    # only use df0/df1 when both were created successfully
                    if (df0._time.inputIsNewerIssueHour(df1._time)):
                        WhfLog.debug("Issue hour has increased, purge now")
                        self.update(df1._time, hoursBack)

        for f in fnames:
            if (self.addFileIfNew(f)):
                ret.append(f)

        self.sortFiles()
        return ret
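
The slicing above (sname[0:8] for the date, sname[9:] for the file) implies state entries of the form 'yyyymmdd/filename'. A small sketch of that convention, with an invented CFS file name:

# Hypothetical state entry; only the yyyymmdd/filename split is implied
# by the code above, not the exact CFS file naming.
sname = "20150101/cfs_example_file.grb2"
ymd, fname = sname[0:8], sname[9:]   # "20150101", "cfs_example_file.grb2"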
Example #12
 def debugPrint(self):
     """ logging debug of content
     """
     WhfLog.debug("Fcst: empty=%d first=%d", df.boolToInt(self._empty),
                  df.boolToInt(self._first))
     if (self._empty):
         return
     WhfLog.debug(
         "Fcst: I:%s step[0]:%s step[1]:%s step[2]:%s layered:%d clockTime=%s",
         self._issue.strftime("%Y%m%d%H"), self._step[0].debugPrintString(),
         self._step[1].debugPrintString(), self._step[2].debugPrintString(),
         df.boolToInt(self._layered),
         self._clockTime.strftime("%Y-%m-%d_%H:%M:%S"))
Example #14
 def UserSelectStatic():
     ## TODO: Prompt user to set-up their static data
     ## Set API, and optionally outputperline and timeout
     ## Give option to skip directory
     DataFiles.WriteStaticLists(API, baseURL, timeout, directory,
                                outputperline, merchant_list)
     Trawler.LoadLists()
def forecastExists(dir, issueTime, fcstHour):
    """ Check if forecast exists

    Parameters
    ----------
    dir : str
       Full path to the issue time directories
    issueTime : datetime
       The issue time (y,m,d,h)       
    fcstHour:  int
       should be 0 or 3

    Returns
    -------
    bool
       True if the forecast does exist on disk
    """

    path = dir + "/"
    path += issueTime.strftime("%Y%m%d%H")
    if (os.path.isdir(path)):
        validTime = issueTime + datetime.timedelta(hours=fcstHour)
        fname = validTime.strftime("%Y%m%d%H%M") + ".LDASIN_DOMAIN1.nc"
        names = df.getFileNames(path)
        for n in names:
            if (n == fname):
                WhfLog.debug("Found %s in %s",  fname, path)
                return True
    return False
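
Usage sketch, assuming the dir/yyyymmddhh/yyyymmddHHMM.LDASIN_DOMAIN1.nc layout implied by the code (the path below is made up):

import datetime

issue = datetime.datetime(2015, 1, 1, 12)
if forecastExists('/d4/hydro/hrrr', issue, 3):
    print 'found 3-hour forecast for', issue.strftime("%Y%m%d%H")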
def regridIfZeroHr(configFile, fileType, fname):
   """If it is a 0 hour forecast (RAP or HRRR) regrid in a special way
   Parameters
   ----------
   configFile : str
      configuration file with all settings
   fileType: str
      HRRR, RAP, ... string
   fname: str
      name of file to regrid and downscale, with yyyymmdd parent dir

   Returns
   -------
   None
   """
   # check for 0 hour by creating a DataFile and checking forecast hour
   try:
      f = df.DataFile(fname[0:8], fname[9:], fileType)
   except FilenameMatchError as fe:
      WhfLog.debug("Cannot check for 0 hour data due to %s", fe)
      raise
   except InvalidArgumentError as ie:
      WhfLog.debug("Cannot check for 0 hour data due to %s", ie)
      raise
   if (f._time._forecastHour == 0):
      WhfLog.setConfigType('AA')
      WhfLog.debug("SPECIAL 0 hour case %s", fname[9:0])
      aaf.forcing(configFile, 'regrid', fileType, fname[9:])
      WhfLog.setConfigType('Short')
def obsExists(dir, issueTime):
    """ Check if obs exists

    Parameters
    ----------
    dir : str
       Full path to the MRMS directories
    issueTime : datetime
       The issue time (y,m,d,h)       

    Returns
    -------
    bool
       True if the data does exist on disk
    """

    path = dir + "/"
    path += issueTime.strftime("%Y%m%d%H")
    if (os.path.isdir(path)):
        fname = issueTime.strftime("%Y%m%d%H%M") + ".LDASIN_DOMAIN1.nc"
        names = df.getFileNames(path)
        for n in names:
            if (n == fname):
                WhfLog.debug("Found %s in %s",  fname, path)
                return True
    return False
Example #19
	def bar_chart(self, conn, column1, column2, table_chosen, title):
		# since this is a bar graph only two columns will be there

		data_df = dfile.double_selector(conn = conn, table= table_chosen, col1 = column1, col2 = column2)

		bar_plot = ggplot(aes(x=column1, weight=column2), data=data_df) + geom_bar() + labs(title=title)
		print(bar_plot)
def createStateFile(parms, realtime):
    """  Called if there is no state file, look at data dirs and create state

   Parameters
   ----------
   parms: Parms
      Parameter settings
   realtime: boolean
      True if realtime, False for archive mode

   Returns
   -------
   none

   Writes out the state file after creating it
   """

    WhfLog.info("Initializing")
    state = State("")

    if (realtime):
        # query directory and get newest model run file, then
        # get all for that and previous issue time
        cfs = df.DataFiles(parms._cfsDir, parms._maxFcstHourCfs, "CFS")
        cfs.setNewestFiles(parms._hoursBackCfs)
        for f in cfs._content:
            f.debugPrint("Newest files: CFS")
        state.initialize(cfs)

    # write out file
    state.write(parms._stateFile)
    def _forecastExists(self, dir):
        """ Check if forecast indicated by local state exists

        Parameters
        ----------
        dir : str
           Full path to the issue time directories

        Returns
        -------
        bool
           True if the forecast does exist on disk
        """
           
        path = dir + "/"
        path += self._issue.strftime("%Y%m%d%H")
        if (os.path.isdir(path)):
            fname = self._valid.strftime("%Y%m%d%H%M") + ".LDASIN_DOMAIN1.nc"
            names = df.getFileNames(path)
            for n in names:
                if (n == fname):
                    WhfLog.debug("Found %s in %s",  fname, path)
                    return True
            return False
        else:
            return False
def createStateFile(parms, fileType, realtime):
   """  Called if there is no state file, look at data dirs and create state
        in realtime, in non-realtime create an empty state.  Write to file.

   Parameters
   ----------
   parms: Parms
      Parameter settings
   fileType: str
      'HRRR', ...
   realtime: boolean
      True if realtime, False for archive mode

   Returns
   -------
   none

   Writes out the state file after creating it
   """

   WhfLog.info("Initializing")
   state = State("")

   if (realtime):

      # query each directory and get newest model run file for each, then
      # get all for that and previous issue time, this becomes state that
      # is not re-processed, we only look for new stuff
      data = df.DataFiles(parms._dataDir, parms._maxFcstHour, fileType)
      data.setNewestFiles(parms._hoursBack)
      for f in data._content:
         f.debugPrint("Newest files: " + fileType)
      state.initialize(data)

   # write out file (at least try to)
   state.write(parms._stateFile, fileType)
Example #25
def plot_one(*args):
    def lbox_bind(*args):
        global columns
        curr1 = list_var.curselection()
        if len(curr1) == 1:
            curr2 = int(curr1[0])
            columns[0] = list_columns[curr2]
            column.set(columns[0])

    global table_choice
    column = StringVar()

    # Declaring the list of columns
    list_columns = dfiles.columns(connection_det[1], table_choice)
    list_col_use = StringVar(value=list_columns)

    #Declaring the labels
    label_plot_one = ttk.Label(mainframe,
                               text="For this chart you require one variable")
    label_var = ttk.Label(mainframe, text="Variable:")  # this is y
    label_var_conf = ttk.Label(mainframe, text="Use Axis Variable:")
    label_y = ttk.Label(mainframe, textvariable=column)
    label_title = ttk.Label(mainframe, text="Title of Chart:")

    # Declaring the List box
    list_var = Listbox(mainframe,
                       height=3,
                       width=30,
                       listvariable=list_col_use)

    # Declaring Entrybox for title
    title_string = StringVar()
    title_entry = ttk.Entry(mainframe, width=30, textvariable=title_string)

    #Grid-ing the things
    label_plot_one.grid(column=2, row=8, padx=5, pady=5)
    label_var.grid(column=1, row=9, padx=5, pady=5)
    list_var.grid(column=2, row=9, padx=5, pady=5)
    label_var_conf.grid(column=1, row=10, padx=5, pady=5)
    label_y.grid(column=2, row=10, padx=5, pady=5)
    label_title.grid(column=1, row=11, padx=5, pady=5)
    title_entry.grid(column=2, row=11, padx=5, pady=5)

    for i in range(0, len(list_columns), 2):
        list_var.itemconfigure(i, background='#f0f0ff')

    list_var.bind('<<ListboxSelect>>', lbox_bind)

    def plot_final(*args):
        global graph_chosen, table_choice
        title_string = str(title_entry.get())
        pltr.Graphs(graph=graph_chosen,
                    connection=connection_det[1],
                    column_1=columns[0],
                    table=table_choice,
                    title=title_string)

    # now declaring the buttons
    plot = ttk.Button(mainframe, text="Plot", command=plot_final)
    plot.grid(column=3, row=11, padx=5, pady=5)
def run(fileType, configFile, realtime):
   """ Run the script, process any new data
   Parameters
   ----------
   fileType: str
      'HRRR', ...
   configFile : str
      Name of the file with settings
   realtime : boolean
      True if this is realtime
   Returns
   -------
   1 for error, 0 for success
   """   
   good = False
   regriddable = ['HRRR', 'RAP', 'MRMS', 'GFS']
   if (fileType not in regriddable):
      print 'ERROR unknown file type command arg ', fileType
      return 1

   # User must pass the config file into the main driver.
   if not os.path.exists(configFile):
      print 'ERROR forcing engine config file not found:', configFile
      return 1

   # read in fixed main params
   parms = parmRead(configFile, fileType, realtime)

   #if there is not a state file, create one now using newest
   if (not os.path.exists(parms._stateFile)):
      parms.debugPrint()
      createStateFile(parms, fileType, realtime)
        
   # read in state
   state = State(parms._stateFile, fileType)

   # query each directory and get newest model run file for each, then
   # get all for that and previous issue time
   data = df.DataFiles(parms._dataDir, parms._maxFcstHour, fileType)
   data.setNewestFiles(parms._hoursBack)

   # Update the state to reflect changes, returning those files to regrid
   # Regrid 'em
   toProcess = state.lookForNew(data, parms._hoursBack, fileType)
   for f in toProcess:
      try:
         regrid(f, fileType, configFile)
      except:
         WhfLog.error("Could not regrid/downscale %s", f)
      else:
         WhfLog.debug("Adding new file %s, and writing state file", f)
         if (not state.addFileIfNew(f)):
            WhfLog.error("File %s was not new", f)
         else:
            state.write(parms._stateFile, fileType)
          
   # write out state (in case it has not been written yet) and exit
   #state.debugPrint()
   state.write(parms._stateFile, fileType)
   return 0
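
Callers presumably invoke run() once per file type; a sketch with an illustrative config path:

status = run('HRRR', '/opt/wrf_hydro/forcing.config', True)  # realtime mode
if status != 0:
    print 'run failed'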
def run(configFile, realtime):
    if not os.path.exists(configFile):
        print 'ERROR forcing engine config file not found.'
        return 1

    # read in fixed main params
    parms = parmRead(configFile, realtime)
    #parms.debugPrint()

    #if there is not a state file, create one now using newest
    if (not os.path.exists(parms._stateFile)):
        parms.debugPrint()
        createStateFile(parms, realtime)

    # begin normal processing situation
    WhfLog.debug("....Check for new input data to regid")

    # read in state
    state = State(parms._stateFile)

    # query directory and get newest model run file, then
    # get all for that and previous issue time
    cfs = df.DataFiles(parms._cfsDir, parms._maxFcstHourCfs, "CFS")
    cfs.setNewestFiles(parms._hoursBackCfs)

    # Same with CFS
    toProcess = state.updateWithNew(cfs, parms._hoursBackCfs)
    for f in toProcess:
        regridCFS(configFile, f)

    # write out state and exit
    #state.debugPrint()
    state.write(parms._stateFile)
    return 0
    def setCurrentModelAvailability(self, parms, model):
        """ Change availability status when appropriate by looking at disk

        Parameters
        ----------
        parms : Parms
            parameters
        model : Model
            overall status for this model run, used for clock time

        Returns
        -------
        none
        """

        if (self._layered):
            # no need to do more, already layered
            return

        # make note if going from nothing to something
        nothing = (not self._hrrr) and (not self._rap)

        #if (nothing):
        #WhfLog.debug("Nothing, so trying to get stuff")
        if (not self._hrrr):
            # update HRRR status
            self._hrrr = self._forecastExists(parms._hrrrDir)
        if (not self._rap):
            # update RAP status
            self._rap = self._forecastExists(parms._rapDir)
        if (nothing and (self._hrrr or self._rap)):
            # went from nothing to something, so start the clock
            WhfLog.debug("Starting clock now, hrrr=%d, rap=%d",
                         df.boolToInt(self._hrrr), df.boolToInt(self._rap))
            self._clockTime = datetime.datetime.utcnow()
        else:
            if (nothing and ((not self._hrrr) and (not self._rap))):
                # nothing to nothing, compare current time to time from
                # model input, and squeaky wheel if too long
                tnow = datetime.datetime.utcnow()
                diff = tnow - model._clockTime
                idiff = diff.total_seconds()
                if (idiff > parms._veryLateSeconds):
                    WhfLog.warning(
                        "Inputs for short range layering are very late Issue:%s Valid:%s",
                        self._issue.strftime("%Y%m%d%H"),
                        self._valid.strftime("%Y%m%d%H"))
 def writeConfigString(self):
     """ Write local content as a one line string
     Returns
     -------
     str
     """
     if (self._empty):
         # write fake stuff out
         ret = "2015010100,201501010000,0,0,0,2015-01-01_00:00:00"
     else:
         ret = self._issue.strftime("%Y%m%d%H") + ","
         ret += self._valid.strftime("%Y%m%d%H%M")
         ret += ',%d,%d,%d,' %(df.boolToInt(self._hrrr),
                               df.boolToInt(self._rap),
                               df.boolToInt(self._layered))
         ret += self._clockTime.strftime("%Y-%m-%d_%H:%M:%S")
     return ret
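
This string is the exact inverse of the config-line __init__ in the first example: characters 0-9 hold the issue time, 11-22 the valid time, 24/26/28 the three flags, and 30 onward the clock time. A hedged round-trip sketch (ForecastStatus again assumed to be the owning class):

line = fs.writeConfigString()
fs2 = ForecastStatus(line)   # re-parses the same fields by fixed offset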
 def debugPrint(self):
     """ WhfLog debug of content
     """
     WhfLog.debug("Model: empty=%d", df.boolToInt(self._empty))
     if (self._empty):
         return
     WhfLog.debug("Model: Issue=%s  clockTime=%s",
                  self._issue.strftime("%Y%m%d%H"),
                  self._clockTime.strftime("%Y-%m-%d_%H:%M:%S"))
def PlotSCD( files='*.pk' ):
	import pylab
	if isinstance( files, basestring ):
		if os.path.isdir( files ): files = os.path.join( files, '*.pk' )
		files = glob.glob( files )
	d = DataFiles.load( files )
	r = SigTools.correlate( d['x'], d['y'], axis=0 )
	SigTools.imagesc( r*numpy.abs(r), y=d['channels'], x=SigTools.samples2msec( range( r.shape[1] ), d['fs'] ), aspect='auto', balance=0.0, colorbar=True )
	pylab.title( ', '.join( [ '%d: %d' % ( yi, ( d['y'] == yi ).sum() ) for yi in numpy.unique( d['y'] ) ] ) )
	pylab.draw()
Example #35
	def area_chart(self, conn, column1 , column2, table_chosen, title):

		data_df = dfile.double_selector(conn=conn, table=table_chosen, col1=column1, col2=column2)

		ymin = float(input("Enter the minimum value that should be plotted:  "))
		ymax = float(input("Enter the maximum value that should be plotted:  "))

		area_plot = ggplot(aes(x=column2, ymin=ymin, ymax=ymax), data=data_df) + geom_area() + theme_gray() + labs(
			title=title)
		print(area_plot)
def PlotTrials( files='*.pk', channel='Cz' ):
	import pylab
	if isinstance( files, basestring ):
		if os.path.isdir( files ): files = os.path.join( files, '*.pk' )
		files = glob.glob( files )
	d = DataFiles.load( files )
	chind = d[ 'channels' ].index( channel )
	v = [ ( d[ 'y' ][ i ], i, vi ) for i, vi in enumerate( d[ 'x' ][ :, chind, : ] ) ]
	v = numpy.array( [ vi for yi, i, vi in sorted( v ) ] )
	SigTools.imagesc( v, x=SigTools.samples2msec( range( v.shape[1] ), d['fs'] ), aspect='auto', balance=0.0, colorbar=True )
	pylab.title( ', '.join( [ '%d: %d' % ( yi, ( d['y'] == yi ).sum() ) for yi in numpy.unique( d['y'] ) ] ) + ( ' (channel %s)' % channel ) )
	pylab.draw()
Example #37
    def __init__(self, configString=""):
        """ Initialize by parsing a a config file  line

        Parameters
        ----------
        configString : str
           Config file line, or empty
        """

        # init to empty
        self._empty = True
        self._step = 0
        self._hrrr0 = False
        self._hrrr3 = False
        self._rap0 = False
        self._rap3 = False
        self._mrms = False
        self._layered = False
        if not configString:
            return

        # parse
        self._empty = False
        self._step = int(configString[0:1])
        self._hrrr0 = df.stringToBool(configString[2:3])
        self._hrrr3 = df.stringToBool(configString[4:5])
        self._rap0 = df.stringToBool(configString[6:7])
        self._rap3 = df.stringToBool(configString[8:9])
        self._mrms = df.stringToBool(configString[10:11])
        self._layered = df.stringToBool(configString[12:13])
    def __init__(self, configString=""):
        """ Initialize by parsing a a config file  line

        Parameters
        ----------
        configString : str
           Config file line, or empty
        """

        # init to empty
        self._empty = True
        self._step = 0
        self._hrrr0 = False
        self._hrrr3 = False
        self._rap0 = False
        self._rap3 = False
        self._mrms = False
        self._layered = False
        if not configString:
            return

        # parse
        self._empty = False
        self._step = int(configString[0:1])
        self._hrrr0 = df.stringToBool(configString[2:3])
        self._hrrr3 = df.stringToBool(configString[4:5])
        self._rap0 = df.stringToBool(configString[6:7])
        self._rap3 = df.stringToBool(configString[8:9])
        self._mrms = df.stringToBool(configString[10:11])
        self._layered = df.stringToBool(configString[12:13])
def main(argv):

    fileType = argv[0]
    good = False
    if (fileType == 'HRRR' or fileType == 'RAP' or fileType == 'MRMS' or
        fileType == 'GFS'):
       good = True
    if (not good):
       print 'ERROR unknown file type command arg ', fileType
       return 1

    # User must pass the config file into the main driver.
    configFile = argv[1]
    if not os.path.exists(configFile):
        print 'ERROR forcing engine config file not found:', configFile
        return 1
    
    # read in fixed main params
    parms = parmRead(configFile, fileType)

    #parms.debugPrint()

    #if there is not a state file, create one now using newest
    if (not os.path.exists(parms._stateFile)):
        parms.debugPrint()
        createStateFile(parms, fileType)
        
    # begin normal processing situation
    #WhfLog.debug("....Check for new input data to regid")
    
    # read in state
    state = State(parms._stateFile, fileType)
    if state.isEmpty():
        # error return here
        return 0
    #state.debugPrint()
    
    # query each directory and get newest model run file for each, then
    # get all for that and previous issue time
    data = df.DataFiles(parms._dataDir, parms._maxFcstHour, fileType)
    data.setNewestFiles(parms._hoursBack)

    # Update the state to reflect changes, returning those files to regrid
    # Regrid 'em
    toProcess = state.updateWithNew(data, parms._hoursBack, fileType)
    for f in toProcess:
        regrid(f, fileType, configFile)

    # write out state and exit
    #state.debugPrint()
    state.write(parms._stateFile, fileType)
    return 0
Example #40
def SetDynamicLists(item, tree=False):
    ### Sigil of Strength: SetDynamicLists(24562) ###
    ### Damask Patch: SetDynamicLists(71334)      ###
    ### +5 Agony Infusion: SetDynamicLists(49428) ###
    out, r, i = Trawler.TrawlCraftingTree(item, tree)
    if (out == "O"):
        i.update(DD.items)
        r.update(DD.recipes)
        DataFiles.WriteDynamicLists(i, r)
        Trawler.LoadLists()
        Ledger.LoadLists()
    else:
        print("An error has occured. The data is likely corrupt or incorrect.")
        print("DynamicData.py has not been ovewritten.")
Example #41
    def boxplot(self, conn, column, table_chosen, title):

        data_df = dfile.single_selector(conn=conn,
                                        table=table_chosen,
                                        column=column)

        box_plot = ggplot(
            aes(x=column),
            data=data_df) + geom_boxplot() + theme_gray() + labs(title=title)
        now = datetime.datetime.now()
        b = now
        print(b)
        print(b - a)  # 'a' is assumed to be a start time recorded elsewhere
        print(box_plot)
Example #42
def connector(*args):
    Host = str(host.get())
    User = str(user.get())
    Password = str(password.get())
    DB = str(database.get())
    x = []
    x = dfiles.connector2(Host=Host, User=User, Password=Password, DB=DB)
    if x[0] == 1:
        success.set('Success')
        # appended at index 1
        connection_det.append(x[1])
        tables_choser(conn=connection_det[1])
    elif x[0] == 0:
        success.set('Failure')
    def write(self, parmFile):
        """ Write contents to a state file

        Parameters
        ----------
        parmFile : str
           Name of file to write to 
        """

        config = SafeConfigParser()

        config.add_section('status')
        config.set('status', 'first', str(int(self._empty)))

        # When adding sections or items, add them in the reverse order of
        # how you want them to be displayed in the actual file.
        # In addition, please note that using RawConfigParser's and the raw
        # mode of ConfigParser's respective set functions, you can assign
        # non-string values to keys internally, but will receive an error
        # when attempting to write to a file or when you get it in non-raw
        # mode. SafeConfigParser does not allow such assignments to take place.
        config.add_section('forecast')

        config.set('forecast', 'first', str(df.boolToInt(self._first)))
        config.set('forecast', 'issue_time', self._issue.strftime("%Y%m%d%H"))
        config.set('forecast', 'layered', str(df.boolToInt(self._layered)))
        for f in self._step:
            s = f.writeConfigString()
            stepName = f.stepName()
            config.set('forecast', stepName, s)
        config.set('forecast', 'clock_time',
                   self._clockTime.strftime("%Y-%m-%d_%H:%M:%S"))
        
        # Write it out
        with open(parmFile, 'wb') as configfile:
            config.write(configfile)
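
A hedged round-trip sketch: initFromStateFile() earlier on this page reads back the same 'status' and 'forecast' sections this method writes (the file name below is hypothetical; State() mirrors the no-argument construction used in other examples):

state.write('forcing_state.parm')
restored = State()
restored.initFromStateFile('forcing_state.parm')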
    def update(self, time, hoursBack):
        """Update state so that input time is newest one

       Parameters
       ----------
       time: ForecastTime
          The newest time
       hoursBack:
          Maximum issue time hours back compared to time
       
       Returns
       -------
       none
       """
        self._cfs = df.filterWithinNHours(self._cfs, 'CFS', time, hoursBack)
 def debugPrint(self):
     """ logging debug of content
     """
     WhfLog.debug("FcstStep: empty=%d", self._empty)
     if (self._empty):
         return
     WhfLog.debug("FcstStep[%d] hrrr0:%d hrrr3:%d rap0:%d rap3:%d mrms:%d lay:%d",
                   self._step, df.boolToInt(self._hrrr0), df.boolToInt(self._hrrr3),
                   df.boolToInt(self._rap0), df.boolToInt(self._rap3),
                   df.boolToInt(self._mrms), df.boolToInt(self._layered))
Example #47
def removeYmdDirs(source_dir):
   '''Remove the yyyymmdd subdirectories of source_dir
   '''
   try:
      #Source directory
      dirExists(source_dir)

   except MissingDirectoryError:
      print "Source directory missing. Check directory path", source_dir

   else:
      # Get ymd subdirectories
      ymdDirs = df.getYyyymmddSubdirectories(source_dir)
      for ymd in ymdDirs:
         path = source_dir + "/" + ymd
         print "Removing -r ", path
         shutil.rmtree(path)
   def update(self, time, hoursBack, fileType):
      """Update typed state so that input time is newest one

       Parameters
       ----------
       time: ForecastTime
          The newest time
       hoursBack:
          Maximum issue time hours back compared to time
       fileType: str
          'HRRR', ...
       
       Returns
       -------
       none
       """
      self._data = df.filterWithinNHours(self._data, fileType, time, hoursBack)
 def debugPrintString(self):
     """ logging debug of content
     Returns
     -------
     str
     """
     if (self._empty):
         return ""
     ret = 's[%d] hr0[%d] hr3[%d] rp0[%d] rp3[%d] mrms[%d] lay[%d]'%(self._step,
                                                                     df.boolToInt(self._hrrr0),
                                                                     df.boolToInt(self._hrrr3),
                                                                     df.boolToInt(self._rap0),
                                                                     df.boolToInt(self._rap3),
                                                                     df.boolToInt(self._mrms),
                                                                     df.boolToInt(self._layered))
     return ret
    def initFromStateFile(self, parmFile):
        """ Initialize from the sate file, by parsing it
        Parameters
        ----------
        parmFile : str
           Name of file to parse
        """

        cf = SafeConfigParser()
        cf.read(parmFile)
        self._empty = False
        self._model = []
        self._fState = []
        status = df.stringToBool(cf.get('status', 'first'))
        if (status):
            self._empty = True
            return
        for m in cf.get('model', 'model_run').split():
            self._model.append(Model(m))
        
        for m in cf.get('forecast', 'forecast1').split():
            self._fState.append(ForecastStatus(m))
 def writeConfigString(self):
     """ Write local content as a one line string
     Returns
     -------
     str
     """
     if (self._empty):
         # write fake stuff out
         ret = "0 0 0 0 0 0 0"
     else:
         ret = '%01d %01d %01d %01d %01d %01d %01d' %(self._step,
                                                      df.boolToInt(self._hrrr0),
                                                      df.boolToInt(self._hrrr3),
                                                      df.boolToInt(self._rap0),
                                                      df.boolToInt(self._rap3),
                                                      df.boolToInt(self._mrms),
                                                      df.boolToInt(self._layered))
     return ret
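
This space-separated line maps back onto the fixed offsets in the ForecastStep __init__ shown earlier (step at position 0, the six flags at 2, 4, 6, 8, 10, 12). A small round-trip sketch, assuming ForecastStep is the owning class:

line = step.writeConfigString()   # e.g. "2 1 0 1 0 1 0"
step2 = ForecastStep(line)        # re-parses the same seven fields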
def forcing(configFile, file_in):
    """ Args:
        1.) configFile (string): The config file with all
            the settings.
        2.) file_in (string): The file name. The full path is
            not necessary as full paths will be derived from
            parameter directory paths and datetime information.
        Returns:
        None - Performs indicated bias correction, regridding,
               and downscaling of CFSv2 data. Any errors are
               trapped and passed back to the driver.
    """

    WhfLog.debug("file_in = %s", file_in)

    # Obtain CFSv2 forcing engine parameters.
    parser = SafeConfigParser()
    parser.read(configFile)

    # Set up logging environments, etc.
    forcing_config_label = "Long_Range"
    try:
        Whf.initial_setup(parser,forcing_config_label)
    except:
        raise

    out_dir = parser.get('layering','long_range_output') 
    tmp_dir = parser.get('bias_correction','CFS_tmp_dir')

    if (not df.makeDirIfNeeded(out_dir)):
        raise MissingDirectoryError('Dir %s cannot be created', out_dir)
    if (not df.makeDirIfNeeded(tmp_dir)):
        raise MissingDirectoryError('Dir %s cannot be created', tmp_dir)

    # Define CFSv2 cycle date and valid time based on file name.
    (cycleYYYYMMDD,cycleHH,fcsthr,em) = Whf.extract_file_info_cfs(file_in)
    em_str = str(em)

    # Pull path to NCL bias correction module file. Export this as an 
    # environmental variable NCL refers to later. 
    nclBiasMod = parser.get('exe','CFS_bias_correct_mod')
    os.environ["CFS_NCL_BIAS_MOD"] = nclBiasMod

    # Establish datetime objects
    dateCurrent = datetime.datetime.today()
    dateCycleYYYYMMDDHH = datetime.datetime(year=int(cycleYYYYMMDD[0:4]),
                          month=int(cycleYYYYMMDD[4:6]),
                          day=int(cycleYYYYMMDD[6:8]),
                          hour=cycleHH)
    dateFcstYYYYMMDDHH = dateCycleYYYYMMDDHH + \
                         datetime.timedelta(seconds=fcsthr*3600)

    # Determine if this is a 0hr forecast file or not.
    if dateFcstYYYYMMDDHH == dateCycleYYYYMMDDHH:
        fFlag = 1 
    else:
        fFlag = 0 
    # Establish final output directories to hold 'LDASIN' files used for
    # WRF-Hydro long-range forecasting. If the directory does not exist,
    # create it.
    out_path = out_dir + "/Member_" + em_str.zfill(2) + "/" + \
               dateCycleYYYYMMDDHH.strftime("%Y%m%d%H")

    try:
        Whf.mkdir_p(out_path)
    except:
        raise

    in_fcst_range = Whf.is_in_fcst_range("CFSv2",fcsthr,parser)

    if in_fcst_range:
        # First, bias-correct CFSv2 data and generate hourly files 
        # from six-hour forecast
        WhfLog.info("Bias correcting for CFSv2 cycle: " + \
                     dateCycleYYYYMMDDHH.strftime('%Y%m%d%H') + \
                     " CFSv2 forecast time: " + dateFcstYYYYMMDDHH.strftime('%Y%m%d%H'))
        try:
            Whf.bias_correction('CFSV2',file_in,dateCycleYYYYMMDDHH,
                                dateFcstYYYYMMDDHH,parser, em = em)
        except (MissingFileError,NCLError):
            raise

        # Second, regrid to the conus IOC domain
        # Loop through each hour in a six-hour CFSv2 forecast time step, compose temporary filename 
        # generated from bias-correction and call the regridding to go to the conus domain.
        if fFlag == 1:
            begCt = 6 
            endCt = 7
        else:
            begCt = 1
            endCt = 7
        for hour in range(begCt,endCt):
            dateTempYYYYMMDDHH = dateFcstYYYYMMDDHH - datetime.timedelta(seconds=(6-hour)*3600)

            fileBiasCorrected = tmp_dir + "/CFSv2_bias_corrected_TMP_" + \
                                dateCycleYYYYMMDDHH.strftime('%Y%m%d%H') + "_" + \
                                dateTempYYYYMMDDHH.strftime('%Y%m%d%H') + ".M" + \
                                em_str.zfill(2) + ".nc"
            WhfLog.info("Regridding CFSv2 to conus domain for cycle: " + \
                         dateCycleYYYYMMDDHH.strftime('%Y%m%d%H') + \
                         " forecast time: " + dateTempYYYYMMDDHH.strftime('%Y%m%d%H'))
            try:
                fileRegridded = Whf.regrid_data("CFSV2",fileBiasCorrected,parser)
            except (MissingFileError,NCLError):
                raise

            # Double check to make sure file was created, delete temporary bias-corrected file
            try:
                Whf.file_exists(fileRegridded)
            except MissingFileError:
                raise	
            cmd = "rm -rf " + fileBiasCorrected
            status = os.system(cmd)
            if status != 0:
                raise SystemCommandError('Command %s failed.'%cmd)

  
        # Third, perform topography downscaling to generate final
        # Loop through each hour in a six-hour CFSv2 forecast time step, compose temporary filename
        # generated from regridding and call the downscaling function.
        for hour in range(begCt,endCt):
            dateTempYYYYMMDDHH = dateFcstYYYYMMDDHH - datetime.timedelta(seconds=(6-hour)*3600)

            WhfLog.info("Downscaling CFSv2 for cycle: " +
                         dateCycleYYYYMMDDHH.strftime('%Y%m%d%H') +
                         " forecast time: " + dateTempYYYYMMDDHH.strftime('%Y%m%d%H'))
            fileRegridded = tmp_dir + "/CFSv2_bias_corrected_TMP_" + \
                            dateCycleYYYYMMDDHH.strftime('%Y%m%d%H') + "_" + \
                                dateTempYYYYMMDDHH.strftime('%Y%m%d%H') + \
                                "_regridded.M" + em_str.zfill(2) + ".nc"
            LDASIN_path_tmp = tmp_dir + "/" + dateTempYYYYMMDDHH.strftime('%Y%m%d%H') + "00.LDASIN_DOMAIN1.nc"
            LDASIN_path_final = out_path + "/" + dateTempYYYYMMDDHH.strftime('%Y%m%d%H') + "00.LDASIN_DOMAIN1"
            try:
                Whf.downscale_data("CFSv2",fileRegridded,parser, out_path=LDASIN_path_tmp, \
                                   verYYYYMMDDHH=dateTempYYYYMMDDHH)
            except (MissingFileError,FilenameMatchError,NCLError,SystemCommandError):
                raise
            # Double check to make sure file was created, delete temporary regridded file
            try:
                Whf.file_exists(LDASIN_path_tmp)
            except MissingFileError:
                raise
            # Rename file to conform to WRF-Hydro expectations
            cmd = "mv " + LDASIN_path_tmp + " " + LDASIN_path_final
            status = os.system(cmd)
            if status != 0:
                raise SystemCommandError('Command %s failed.'%cmd)
            try:
                Whf.file_exists(LDASIN_path_final)
            except MissingFileError:
                raise
            cmd = "rm -rf " + fileRegridded
            status = os.system(cmd)
            if status != 0:
                raise SystemCommandError('Command %s failed.'%cmd)
       
	WhfLog.info("Long_Range processing for %s%d Forecast Hour: %d Ensemble: %s",
                cycleYYYYMMDD, cycleHH, fcsthr, em_str)
    else:
        # Skip processing this file. Exit gracefully with a 0 exit status.
        WhfLog.info("Requested file is outside max fcst for CFSv2")
def ClassifyERPs (
		featurefiles,
		C = (10.0, 1.0, 0.1, 0.01),
		gamma = (1.0, 0.8, 0.6, 0.4, 0.2, 0.0),
		keepchan = (),
		rmchan = (),
		rmchan_usualsuspects = ('AUDL','AUDR','LAUD','RAUD','SYNC','VSYNC', 'VMRK', 'OLDREF'),
		rebias = True,
		save = False,
		select = False,
		description='ERPs to attended vs unattended events',
		maxcount=None,
		classes=None,
		folds=None,
		time_window=None,
		keeptrials=None,
	):

	file_inventory = []
	d = DataFiles.load(featurefiles, catdim=0, maxcount=maxcount, return_details=file_inventory)
 	if isinstance(folds, basestring) and folds.lower() in ['lofo', 'loro', 'leave on run out', 'leave one file out']:
 		n, folds = 0, []
 		for each in file_inventory:
 			neach = each[1]['x']
 			folds.append(range(n, n+neach))
 			n += neach
 	
	if 'x' not in d: raise ValueError("found no trial data - no 'x' variable - in the specified files")
	if 'y' not in d: raise ValueError("found no trial labels - no 'y' variable - in the specified files")

	x = d['x']
	y = numpy.array(d['y'].flat)
	if keeptrials != None:
		x = x[numpy.asarray(keeptrials), :, :]
		y = y[numpy.asarray(keeptrials)]
		
	if time_window != None:
		fs = d['fs']
		t = SigTools.samples2msec(numpy.arange(x.shape[2]), fs)
		x[:, :, t<min(time_window)] = 0
		x[:, :, t>max(time_window)] = 0
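		# For illustration (hypothetical values): with fs = 256 Hz and
		# time_window = (200, 450), samples whose latencies fall outside
		# 200-450 ms are zeroed and so cannot contribute to classification.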
		
		
	if classes != None:
		for cl in classes:
			if cl not in y: raise ValueError("class %s is not in the dataset" % str(cl))
		mask = numpy.array([yi in classes for yi in y])
		y = y[mask]
		x = x[mask]
		discarded = sum(mask==False)
		if discarded: print "discarding %d trials that are outside the requested classes %s"%(discarded,str(classes))
		
	n = len(y)
	uy = numpy.unique(y)
	if uy.size != 2: raise ValueError("expected 2 classes in dataset, found %d : %s" % (uy.size, str(uy)))
	for uyi in uy:
		nyi = sum([yi==uyi for yi in y])
		nyi_min = 2
		if nyi < nyi_min: raise ValueError('only %d exemplars of class %s - need at least %d' % (nyi,str(uyi),nyi_min))
			
	y = numpy.sign(y - uy.mean())
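	# For illustration: labels y in {1, 2} give uy.mean() = 1.5, so the
	# sign operation maps them to the -1/+1 coding used below.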

	cov,trchvar = SigTools.spcov(x=x, y=y, balance=False, return_trchvar=True) # NB: symwhitenkern would not be able to balance
	
	starttime = time.time()
	
	chlower = [ch.lower() for ch in d['channels']]
	if keepchan in [None,(),'',[]]:
		if isinstance(rmchan, basestring): rmchan = rmchan.split()
		if isinstance(rmchan_usualsuspects, basestring): rmchan_usualsuspects = rmchan_usualsuspects.split()
		allrmchan = [ch.lower() for ch in list(rmchan)+list(rmchan_usualsuspects)]
		unwanted = numpy.array([ch in allrmchan for ch in chlower])
		notfound = [ch for ch in rmchan if ch.lower() not in chlower]
	else:
		if isinstance(keepchan, basestring): keepchan = keepchan.split()
		lowerkeepchan = [ch.lower() for ch in keepchan]
		unwanted = numpy.array([ch not in lowerkeepchan for ch in chlower])
		notfound = [ch for ch in keepchan if ch.lower() not in chlower]
		
	wanted = numpy.logical_not(unwanted)
	print ' '
	if len(notfound): print "WARNING: could not find channel%s %s\n" % ({1:''}.get(len(notfound),'s'), ', '.join(notfound))
	removed = [ch for removing,ch in zip(unwanted, d['channels']) if removing]
	if len(removed): print "removed %d channel%s (%s)" % (len(removed), {1:''}.get(len(removed),'s'), ', '.join(removed))
	print "classification will be based on %d channel%s" % (sum(wanted), {1:''}.get(sum(wanted),'s'))
	print "%d negatives + %d positives = %d exemplars" % (sum(y<0), sum(y>0), n)
	print ' '
	
	x[:, unwanted, :] = 0
	cov[:, unwanted] = 0
	cov[unwanted, :] = 0
	nu = numpy.asarray(cov).diagonal()[wanted].mean()
	for i in range(len(cov)):
		if cov[i,i] == 0: cov[i,i] = nu
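	# Unwanted channels were zeroed out of cov above; restoring a nonzero
	# diagonal entry (nu, the mean variance of the kept channels) presumably
	# keeps cov well-conditioned for the whitening kernel.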
	
	if not isinstance(C, (tuple,list,numpy.ndarray,type(None))): C = [C]
	if not isinstance(gamma, (tuple,list,numpy.ndarray,type(None))): gamma = [gamma]

	c = SigTools.klr2class(lossfunc=SigTools.balanced_loss, relcost='balance')
	c.varyhyper({})
	if c != None: c.hyper.C=list(C)
	if gamma == None: c.hyper.kernel.func = SigTools.linkern
	else: c.varyhyper({'kernel.func':SigTools.symwhitenkern, 'kernel.cov':[cov], 'kernel.gamma':list(gamma)})
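	# cvtrain below trains with cross-validation over the supplied folds; the
	# chosen hyperparameters are read back from c.cv.chosen.hyper further down.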
	c.cvtrain(x=x, y=y, folds=folds)
	if rebias: c.rebias()
	c.calibrate()

	chosen = c.cv.chosen.hyper
	if gamma == None:
		Ps = None
		Gp = c.featureweight(x=x)
	else:
		Ps = SigTools.svd(SigTools.shrinkcov(cov, copy=True, gamma=chosen.kernel.gamma)).isqrtm
		xp = SigTools.spfilt(x, Ps.H, copy=True)
		Gp = c.featureweight(x=xp)
	
	u = SigTools.stfac(Gp, Ps)
	u.channels = d['channels']		
	u.channels_used = wanted
	u.fs = d['fs']
	u.trchvar = trchvar
	try: u.channels = SigTools.ChannelSet(u.channels)
	except Exception: print 'WARNING: failed to convert channels to ChannelSet'

	elapsed = time.time() - starttime
	minutes = int(elapsed/60.0)
	seconds = int(round(elapsed - minutes * 60.0))
	print '%d min %d sec' % (minutes, seconds)
	datestamp = time.strftime('%Y-%m-%d %H:%M:%S')
	csummary = '%s (%s) trained on %d (CV %s = %.3f) at %s' % (
		c.__class__.__name__,
		SigTools.experiment()._shortdesc(chosen),
		sum(c.input.istrain),
		c.loss.func.__name__,
		c.loss.train,
		datestamp,
	)
	description = 'binary classification of %s: %s' % (description, csummary)
	u.description = description
	
	if save or select:
		if not isinstance(save, basestring):
			save = featurefiles
			if isinstance(save, (tuple,list)): save = save[-1]
			if save.lower().endswith('.gz'): save = save[:-3]
			if save.lower().endswith('.pk'): save = save[:-3]
			save = save + '_weights.prm'
		print "\nsaving %s\n" % save
		Parameters.Param(u.G.A, Name='ERPClassifierWeights', Section='PythonSig', Subsection='Epoch', Comment=csummary).write_to(save)
		Parameters.Param(c.model.bias, Name='ERPClassifierBias', Section='PythonSig', Subsection='Epoch', Comment=csummary).append_to(save)
		Parameters.Param(description, Name='SignalProcessingDescription', Section='PythonSig').append_to(save)
		if select:
			if not isinstance(select, basestring): select = 'ChosenWeights.prm'
			if not os.path.isabs(select): select = os.path.join(os.path.split(save)[0], select)
			print "saving %s\n" % select
			import shutil; shutil.copyfile(save, select)
	
	print description
	return u,c
def anal_assim_layer(cycleYYYYMMDDHH, fhr, action, config):
    """ Analysis and Assimilation layering
        Performs layering/combination of RAP/HRRR/MRMS
        data for a particular analysis and assimilation
        model cycle and forecast hour.

        Args:
            cycleYYYYMMDDHH (string): Analysis and assimilation
                                      model cycle date.
            fhr (string): Forecast hour of analysis and assimilation 
                          model cycle. Possible values are -2, -1, 0.
            action (string): Specifying which layering to do, given
                             possible available model data. Possible 
                             values are "RAP", "RAP_HRRR", and
                             "RAP_HRRR_MRMS".
            config (string) : Config file name
        Returns: 
            None: Performs specified layering to final input directory
                  used for WRF-Hydro.
    """

    # Determine specific layering route to take
    str_split = action.split("_")
    process = len(str_split)
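    # For example, action = "RAP_HRRR_MRMS" splits into three tokens, so
    # process = 3, while action = "RAP" gives process = 1; the count selects
    # the layering branch below.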

    # Determine specific date/time information used for composing regridded
    # file paths.
    yearCycle = int(cycleYYYYMMDDHH[0:4])
    monthCycle = int(cycleYYYYMMDDHH[4:6])
    dayCycle = int(cycleYYYYMMDDHH[6:8])
    hourCycle = int(cycleYYYYMMDDHH[8:10])
    fhr = int(fhr)

    dateCurrent = datetime.datetime.today()
    cycleDate = datetime.datetime(year=yearCycle, month=monthCycle, day=dayCycle, hour=hourCycle)
    validDate = cycleDate + datetime.timedelta(seconds=fhr * 3600)
    fcstWindowDate = validDate + datetime.timedelta(seconds=-3 * 3600)  # Used for 3-hr forecast
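    # For illustration (hypothetical cycle): cycleYYYYMMDDHH = "2016010112"
    # with fhr = -2 gives validDate 2016-01-01 10Z and fcstWindowDate
    # 2016-01-01 07Z, the start of the 3-hr forecast window.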

    # HRRR/RAP files necessary for fluxes and precipitation data.
    # Obtain analysis and assimilation configuration parameters.
    parser = SafeConfigParser()
    parser.read(config)
    out_dir = parser.get("layering", "analysis_assimilation_output")
    tmp_dir = parser.get("layering", "analysis_assimilation_tmp")
    qpe_parm_dir = parser.get("layering", "qpe_combine_parm_dir")
    hrrr_ds_dir_3hr = parser.get("downscaling", "HRRR_finished_output_dir")
    hrrr_ds_dir_0hr = parser.get("downscaling", "HRRR_finished_output_dir_0hr")
    rap_ds_dir_3hr = parser.get("downscaling", "RAP_finished_output_dir")
    rap_ds_dir_0hr = parser.get("downscaling", "RAP_finished_output_dir_0hr")
    mrms_ds_dir = parser.get("regridding", "MRMS_finished_output_dir")
    layer_exe = parser.get("exe", "Analysis_Assimilation_layering")
    ncl_exec = parser.get("exe", "ncl_exe")

    # in case it is first time, create the output dirs
    df.makeDirIfNeeded(out_dir)
    df.makeDirIfNeeded(tmp_dir)

    # Sanity checking
    try:
        whf.dir_exists(out_dir)
        whf.dir_exists(tmp_dir)
        whf.dir_exists(qpe_parm_dir)
        whf.dir_exists(hrrr_ds_dir_3hr)
        whf.dir_exists(hrrr_ds_dir_0hr)
        whf.dir_exists(rap_ds_dir_3hr)
        whf.dir_exists(rap_ds_dir_0hr)
        whf.dir_exists(mrms_ds_dir)
        whf.file_exists(layer_exe)
    except MissingDirectoryError:
        WhfLog.error("Missing directory during preliminary checking of Analysis Assimilation layering")
        raise

    # Establish final output directories to hold 'LDASIN' files used for
    # WRF-Hydro long-range forecasting. If the directory does not exist,
    # create it.
    out_path = out_dir + "/" + cycleDate.strftime("%Y%m%d%H")

    whf.mkdir_p(out_path)

    # Compose necessary file paths
    hrrr0Path = (
        hrrr_ds_dir_0hr
        + "/"
        + validDate.strftime("%Y%m%d%H")
        + "/"
        + validDate.strftime("%Y%m%d%H")
        + "00.LDASIN_DOMAIN1.nc"
    )
    hrrr3Path = (
        hrrr_ds_dir_3hr
        + "/"
        + fcstWindowDate.strftime("%Y%m%d%H")
        + "/"
        + validDate.strftime("%Y%m%d%H")
        + "00.LDASIN_DOMAIN1.nc"
    )
    rap0Path = (
        rap_ds_dir_0hr
        + "/"
        + validDate.strftime("%Y%m%d%H")
        + "/"
        + validDate.strftime("%Y%m%d%H")
        + "00.LDASIN_DOMAIN1.nc"
    )
    rap3Path = (
        rap_ds_dir_3hr
        + "/"
        + fcstWindowDate.strftime("%Y%m%d%H")
        + "/"
        + validDate.strftime("%Y%m%d%H")
        + "00.LDASIN_DOMAIN1.nc"
    )
    mrmsPath = (
        mrms_ds_dir
        + "/"
        + validDate.strftime("%Y%m%d%H")
        + "/"
        + validDate.strftime("%Y%m%d%H")
        + "00.LDASIN_DOMAIN1.nc"
    )
    hrrrBiasPath = qpe_parm_dir + "/HRRR_NLDAS-CPC_bias-corr_m" + validDate.strftime("%m") + "_v9_wrf1km.grb2"
    hrrrWgtPath = qpe_parm_dir + "/HRRR_wgt_m" + validDate.strftime("%m") + "_v8_wrf1km.grb2"
    mrmsBiasPath = (
        qpe_parm_dir + "/MRMS_radonly_NLDAS-CPC_bias-corr_m" + validDate.strftime("%m") + "_v9_wrf1km-sm60.grb2"
    )
    mrmsWgtPath = qpe_parm_dir + "/MRMS_radonly_wgt_m" + validDate.strftime("%m") + "_v8_wrf1km.grb2"
    rapBiasPath = qpe_parm_dir + "/RAPD_NLDAS-CPC_bias-corr_m" + validDate.strftime("%m") + "_v9_wrf1km.grb2"
    rapWgtPath = qpe_parm_dir + "/RAPD_wgt_m" + validDate.strftime("%m") + "_v8_wrf1km.grb2"
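    # For illustration (hypothetical month): a June valid date composes, e.g.,
    #   <qpe_parm_dir>/HRRR_NLDAS-CPC_bias-corr_m06_v9_wrf1km.grb2
    #   <qpe_parm_dir>/MRMS_radonly_wgt_m06_v8_wrf1km.grb2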

    # Sanity checking on parameter data
    try:
        whf.file_exists(hrrrBiasPath)
        whf.file_exists(hrrrWgtPath)
        whf.file_exists(mrmsBiasPath)
        whf.file_exists(mrmsWgtPath)
        whf.file_exists(rapBiasPath)
        whf.file_exists(rapWgtPath)
    except MissingFileError:
        WhfLog.error("Missing file encountered while checking parameter data for AA")
        raise

    # Compose output file paths
    LDASIN_path_tmp = tmp_dir + "/" + validDate.strftime("%Y%m%d%H") + "00.LDASIN_DOMAIN1_TMP.nc"
    LDASIN_path_final = out_path + "/" + validDate.strftime("%Y%m%d%H") + "00.LDASIN_DOMAIN1"
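    # Note the final LDASIN path omits the ".nc" extension; the rename near
    # the end of this function conforms the name to WRF-Hydro expectations.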
    # Perform layering/combining depending on processing path.
    if process == 1:  # RAP only
        WhfLog.info(
            "Layering and Combining RAP only for cycle date: "
            + cycleDate.strftime("%Y%m%d%H")
            + " valid date: "
            + validDate.strftime("%Y%m%d%H")
        )
        # Check for existence of input files
        try:
            whf.file_exists(rap0Path)
            whf.file_exists(rap3Path)
        except MissingFileError:
            WhfLog.error("Missing RAP files for layering")
            raise

    elif process == 2:  # HRRR and RAP only
        WhfLog.info(
            "Layering and Combining RAP and HRRR for cycle date: "
            + cycleDate.strftime("%Y%m%d%H")
            + " valid date: "
            + validDate.strftime("%Y%m%d%H")
        )
        # Check for existence of input files
        try:
            whf.file_exists(rap0Path)
            whf.file_exists(rap3Path)
            whf.file_exists(hrrr0Path)
            whf.file_exists(hrrr3Path)
        except MissingFileError:
            WhfLog.error("Missing RAP or HRRR files for layering")
            raise
    elif process == 3:  # HRRR, RAP, and MRMS
        WhfLog.info(
            "Layering and Combining RAP/HRRR/MRMS for cycle date: "
            + cycleDate.strftime("%Y%m%d%H")
            + " valid date: "
            + validDate.strftime("%Y%m%d%H")
        )
        # Check for existence of input files
        try:
            whf.file_exists(rap0Path)
            whf.file_exists(rap3Path)
            whf.file_exists(hrrr0Path)
            whf.file_exists(hrrr3Path)
            whf.file_exists(mrmsPath)
        except MissingFileError:
            WhfLog.error("Missing RAP or HRRR or MRMS files for layering")
            raise

    else:  # Error out
        WhfLog.error("Invalid input action selected, invalid layer combination provided in AA.")
        raise UnrecognizedCommandError

    hrrrB_param = "'hrrrBFile=" + '"' + hrrrBiasPath + '"' + "' "
    mrmsB_param = "'mrmsBFile=" + '"' + mrmsBiasPath + '"' + "' "
    rapB_param = "'rapBFile=" + '"' + rapBiasPath + '"' + "' "
    hrrrW_param = "'hrrrWFile=" + '"' + hrrrWgtPath + '"' + "' "
    mrmsW_param = "'mrmsWFile=" + '"' + mrmsWgtPath + '"' + "' "
    rapW_param = "'rapWFile=" + '"' + rapWgtPath + '"' + "' "
    hrrr0_param = "'hrrr0File=" + '"' + hrrr0Path + '"' + "' "
    hrrr3_param = "'hrrr3File=" + '"' + hrrr3Path + '"' + "' "
    rap0_param = "'rap0File=" + '"' + rap0Path + '"' + "' "
    rap3_param = "'rap3File=" + '"' + rap3Path + '"' + "' "
    mrms_param = "'mrmsFile=" + '"' + mrmsPath + '"' + "' "
    process_param = "'process=" + '"' + str(process) + '"' + "' "
    out_param = "'outPath=" + '"' + LDASIN_path_tmp + '"' + "' "

    cmd_params = (
        hrrrB_param
        + mrmsB_param
        + rapB_param
        + hrrrW_param
        + mrmsW_param
        + rapW_param
        + hrrr0_param
        + hrrr3_param
        + rap0_param
        + rap3_param
        + mrms_param
        + process_param
        + out_param
    )
    cmd = ncl_exec + " -Q " + cmd_params + " " + layer_exe
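    # The composed command has the shape (paths elided for illustration):
    #   <ncl_exec> -Q 'hrrrBFile="..."' 'mrmsBFile="..."' ... 'outPath="..."' <layer_exe>
    # i.e. each NCL variable is passed as a single-quoted name="value" pair.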
    status = os.system(cmd)

    if status != 0:
        WhfLog.error("Error in combinining NCL program")
        raise NCLError("NCL error encountered while combining in AA")

    # Double check to make sure the layered file was created; the temporary
    # file is removed after the rename below.
    whf.file_exists(LDASIN_path_tmp)
    # Rename file to conform to WRF-Hydro expectations
    cmd = "mv " + LDASIN_path_tmp + " " + LDASIN_path_final
    status = os.system(cmd)
    if status != 0:
        WhfLog.error("Failure to rename " + LDASIN_path_tmp)
        raise SystemCommandError('Command %s failed.' % cmd)
    try:
        whf.file_exists(LDASIN_path_final)
    except MissingFileError:
        WhfLog.error("Missing LDASIN_path_final file")
        raise
    cmd = "rm -rf " + LDASIN_path_tmp
    status = os.system(cmd)
    if status != 0:
        WhfLog.error("Failure to remove " + LDASIN_path_tmp)
        raise SystemCommandError
def main(argv):

    # User must pass the config file into the main driver.
    configFile = argv[0]
    if not os.path.exists(configFile):
        print 'ERROR forcing engine config file not found.'
        return 1

    # read in fixed main params
    parms = parmRead(configFile)

    newestT = ""
    newestT1 = df.newestIssueTime(parms._hrrrDir)
    if (newestT):
        if (newestT1):
            if (newestT1 > newestT):
                newestT = newestT1
    else:
        newestT = newestT1
        
    newestT1 = df.newestIssueTime(parms._rapDir)
    if (newestT):
        if (newestT1):
            if (newestT1 > newestT):
                newestT = newestT1
    else:
        newestT = newestT1
    newestT1 = df.newestIssueTime(parms._hrrr0hrDir)
    if (newestT):
        if (newestT1):
            if (newestT1 > newestT):
                newestT = newestT1
    else:
        newestT = newestT1
        
    newestT1 = df.newestIssueTime(parms._rap0hrDir)
    if (newestT):
        if (newestT1):
            if (newestT1 > newestT):
                newestT = newestT1
    else:
        newestT = newestT1
        
    newestT1 = df.newestIssueTime(parms._mrmsDir)
    if (newestT):
        if (newestT1):
            if (newestT1 > newestT):
                newestT = newestT1
    else:
        newestT = newestT1
        

    if (not newestT):
        WhfLog.debug("No data")
        return 0

    # if there is not a state file, create one now using newest
    if (not os.path.exists(parms._stateFile)):
        state = State()
        WhfLog.info("Initializing")
        state.initialize(parms, newestT)
        state.write(parms._stateFile)

    # Normal processing situation
    #WhfLog.debug("Look for Layering....")
    
    # read in state
    state2 = State()
    state2.initFromStateFile(parms._stateFile)
    #state2.debugPrint()
    if state2._empty:
        # state file was empty; nothing to layer
        return 0
    
    # check for new issue time
    if (state2.isNewModelIssueTime(newestT)):
        WhfLog.info("Re-Initializing state, new model issue time %s", newestT)
        state2.initialize(parms, newestT)

    # update availability
    state2.setCurrentModelAvailability(parms, configFile)

    # write out final state
    state2.write(parms._stateFile)
    return 0
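# A minimal invocation sketch (an assumption, not part of the original module:
# it presumes 'sys' is imported at the top of the file and that WhfLog is
# configured elsewhere; the config-file path is the sole argument):
if __name__ == "__main__":
    sys.exit(main(sys.argv[1:]))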