# Resolve the local SuperDARN file-name format, falling back to the package
# default when the rcParams entry cannot be read.
try:
    local_fnamefmt = \
        davitpy.rcParams['DAVIT_SD_LOCAL_FNAMEFMT'].split(',')
except:
    # NOTE(review): bare except swallows every error, not just a missing
    # rcParams key -- consider narrowing to `except KeyError`.
    local_fnamefmt = ['{date}.{hemi}.{ftype}']
    estr = 'Environment variable DAVIT_SD_LOCAL_'
    estr = '{:s}FNAMEFMT not set, using '.format(estr)
    # NOTE(review): local_fnamefmt is a list; formatting it with '{:s}'
    # raises TypeError on Python 3 -- verify, '{!s}' would be safe.
    estr = '{:s}default: {:s}'.format(estr, local_fnamefmt)
    logging.info(estr)

# Uncompressed copies of the fetched files go into the caller's temp dir.
outdir = tmpdir

# fetch the local files
temp = futils.fetch_local_files(self.sTime, self.eTime, local_dirfmt,
                                local_dict, outdir, local_fnamefmt)

# check to see if the files actually have data between
# stime and etime
valid = self.__validate_fetched(temp, self.sTime, self.eTime)
# Keep only the files that validated; collect the rest for removal.
filelist = [x[0] for x in zip(temp, valid) if x[1]]
invalid_files = [x[0] for x in zip(temp, valid) if not x[1]]
if len(invalid_files) > 0:
    for f in invalid_files:
        estr = 'removing invalid file: {:s}'.format(f)
        logging.info(estr)
        # NOTE(review): shells out to `rm`; os.remove(f) would avoid the
        # shell and fail loudly on unexpected paths -- confirm intent.
        os.system('rm {:s}'.format(f))
# Resolve the local SuperDARN file-name format, falling back to the package
# default when the rcParams entry cannot be read.
try:
    local_fnamefmt = \
        davitpy.rcParams['DAVIT_SD_LOCAL_FNAMEFMT'].split(',')
except:
    # NOTE(review): bare except swallows every error, not just a missing
    # rcParams key -- consider narrowing to `except KeyError`.
    local_fnamefmt = ['{date}.{hemi}.{ftype}']
    estr = 'Environment variable DAVIT_SD_LOCAL_'
    estr = '{:s}FNAMEFMT not set, using '.format(estr)
    # NOTE(review): local_fnamefmt is a list; formatting it with '{:s}'
    # raises TypeError on Python 3 -- verify, '{!s}' would be safe.
    estr = '{:s}default: {:s}'.format(
        estr, local_fnamefmt)
    logging.info(estr)

# Uncompressed copies of the fetched files go into the caller's temp dir.
outdir = tmpdir

# fetch the local files
temp = futils.fetch_local_files(self.sTime, self.eTime, local_dirfmt,
                                local_dict, outdir, local_fnamefmt)

# check to see if the files actually have data between
# stime and etime
valid = self.__validate_fetched(temp, self.sTime, self.eTime)
# Keep only the files that validated; collect the rest for logging.
filelist = [x[0] for x in zip(temp, valid) if x[1]]
invalid_files = [
    x[0] for x in zip(temp, valid) if not x[1]
]
if len(invalid_files) > 0:
    for f in invalid_files:
        # NOTE(review): this variant only logs the invalid file; the actual
        # removal presumably follows outside this excerpt -- confirm.
        estr = 'removing invalid file: {:s}'.format(f)
        logging.info(estr)
# Python 2 variant of the local-file fetch logic (uses `print` statements).
# Build the substitution dict for the local directory/file-name templates.
if local_dict is None:
    local_dict = {'hemi':hemi, 'ftype':ftype}
# Force the file type to the caller-requested value if the key exists.
if ('ftype' in local_dict.keys()):
    local_dict['ftype'] = ftype
# Resolve the file-name format, falling back to the package default.
if local_fnamefmt is None:
    try:
        local_fnamefmt = davitpy.rcParams['DAVIT_SD_LOCAL_FNAMEFMT'].split(',')
    except:
        # NOTE(review): bare except swallows every error, not just a
        # missing rcParams key.
        local_fnamefmt = ['{date}.{hemi}.{ftype}']
        print 'Environment variable DAVIT_SD_LOCAL_FNAMEFMT not set, using default:',local_fnamefmt

# Uncompressed copies of the fetched files go into the caller's temp dir.
outdir = tmpDir

#fetch the local files
temp = fetch_local_files(self.sTime, self.eTime, local_dirfmt, local_dict, outdir, \
                         local_fnamefmt, verbose=verbose)

# check to see if the files actually have data between stime and etime
valid = self.__validate_fetched(temp,self.sTime,self.eTime)
# Keep only the files that validated; remove the rest.
filelist = [x[0] for x in zip(temp,valid) if x[1]]
invalid_files = [x[0] for x in zip(temp,valid) if not x[1]]
if len(invalid_files) > 0:
    for f in invalid_files:
        print 'removing invalid file: ' + f
        # NOTE(review): shells out to `rm`; os.remove(f) would be safer.
        os.system('rm ' + f)

# If we have valid files then continue
if(len(filelist) > 0):
    print 'found',ftype,'data in local files'
    # Record the file type and data type ('dmap') on the instance.
    self.fType,self.dType = ftype,'dmap'
def fetch_concat(ctr_date, localdirfmt, localdict, tmpdir, fnamefmt,
                 oneday_file_only=False):
    """Fetch files for one or three days centered at ctr_date.day, then
    unzip and concatenate them into a single file.

    Parameters
    ----------
    ctr_date : datetime.datetime
        A full day for which data are to be read.
    localdirfmt : str
        String defining the local directory structure
        (eg "{ftype}/{year}/{month}/{day}/").
    localdict : dict
        Contains keys for non-time related information in remotedirfmt and
        fnamefmt (eg remotedict={'ftype':'fitex','radar':'sas','channel':'a'})
    tmpdir : str
        Temporary directory in which to store uncompressed files
        (must end with a "/").
    fnamefmt : str or list
        Optional string or list of file name formats
        (eg fnamefmt = ['{date}.{hour}......{radar}.{channel}.{ftype}',
                        '{date}.C0.{radar}.{ftype}']
         or fnamefmt = '{date}.{hour}......{radar}.{ftype}')
    oneday_file_only : bool
        If set to True, only the one day of data given by ctr_date will be
        concatenated.  If set to False (default), three days of data
        centered at ctr_date will be concatenated.

    Returns
    -------
    str or NoneType
        Full path of the concatenated file, or None if no files were found.
    """
    # Construct stime and etime for the requested window: either the single
    # day [ctr_date, ctr_date+1) or three days centered on ctr_date.
    if oneday_file_only:
        stime = ctr_date
        etime = ctr_date + dt.timedelta(days=1)
    else:
        stime = ctr_date - dt.timedelta(days=1)
        etime = ctr_date + dt.timedelta(days=2)

    # extract info from the localdict argument
    radcode = localdict["radar"]
    ftype = localdict["ftype"]
    channel = localdict["channel"]

    # fetch (and uncompress) the local files for the requested window
    file_list = fetch_local_files(stime, etime, localdirfmt, localdict,
                                  tmpdir, fnamefmt)

    # check if we have found files
    if len(file_list) == 0:
        return None

    # concatenate the files into a single file
    logging.info('Concatenating all the files in to one')

    # choose a temp file name with time span info for cacheing; the channel
    # component is omitted when no meaningful channel was given
    if (channel is None) or (channel == "."):
        tmp_name = '%s%s.%s.%s.%s.%s.%s' % \
            (tmpdir, stime.strftime("%Y%m%d"), stime.strftime("%H%M%S"),
             etime.strftime("%Y%m%d"), etime.strftime("%H%M%S"),
             radcode, ftype)
    else:
        tmp_name = '%s%s.%s.%s.%s.%s.%s.%s' % \
            (tmpdir, stime.strftime("%Y%m%d"), stime.strftime("%H%M%S"),
             etime.strftime("%Y%m%d"), etime.strftime("%H%M%S"),
             radcode, channel, ftype)

    # BUG FIX: string.join() is Python-2-only (removed in Python 3);
    # ' '.join() produces the identical space-separated command line.
    cat_cmd = 'cat ' + ' '.join(file_list) + ' > ' + tmp_name
    logging.debug(cat_cmd)
    os.system(cat_cmd)

    # remove the unneeded files from the tmpdir (the .bz2 removal is
    # best-effort: os.system ignores a missing compressed copy)
    for file_name in file_list:
        logging.debug('rm ' + file_name)
        os.system('rm ' + file_name)
        os.system('rm ' + file_name + ".bz2")
        #os.system('rm ' + file_name+".gz")

    logging.info("removed unneeded files")
    return tmp_name