def write_to_nc(self, pathtofile=None, file_date_incr=None):
    """
    Write collocation time series to netCDF, one file per date
    increment between self.sdate and self.edate.

    param:
        pathtofile - explicit target file; if None the path is built
                     from the collocation_dict 'dst' templates for
                     each date increment
        file_date_incr - date increment per output file:
                         'm' month (default), 'Y' year, 'd' day

    return: None
    """
    if 'error' in vars(self):
        print('Erroneous collocation_class file detected')
        print('--> dump to netCDF not possible !')
        return
    tmpdate = self.sdate
    edate = self.edate
    while tmpdate <= edate:
        if pathtofile is None:
            # build the target path from the config templates;
            # fix: rebuilt every iteration -- previously the first
            # auto-generated path was reused for all later increments
            path_template = collocation_dict[self.obstype]\
                ['dst']['path_template'][0]
            file_template = collocation_dict[self.obstype]\
                ['dst']['file_template']
            strsublst = collocation_dict[self.obstype]['dst']['strsub']
            subdict = make_subdict(strsublst,
                                   class_object_dict=vars(self))
            if 'filterData' in vars(self):
                file_template = 'filtered_' + file_template
            tmppath = os.path.join(path_template, file_template)
            # fix: the original duplicated an identical
            # make_pathtofile call in separate insitu /
            # satellite_altimeter branches; also removed the unused
            # local 'leadtimestr'
            if self.obstype in ('insitu', 'satellite_altimeter'):
                tmppathtofile = make_pathtofile(tmppath, strsublst,
                                                subdict, date=tmpdate)
            else:
                tmppathtofile = None
        else:
            tmppathtofile = pathtofile
        if self.obstype == 'insitu':
            title = ('Collocation of ' + self.stdvarname
                     + ' observations from ' + self.nID
                     + ' ' + self.sensor
                     + ' vs ' + self.model)
        elif self.obstype == 'satellite_altimeter':
            title = ('Collocation of ' + self.stdvarname
                     + ' observations from ' + self.mission
                     + ' vs ' + self.model)
        dumptonc_ts_collocation(self, tmppathtofile, title)
        # determine date increment
        if file_date_incr is None:
            file_date_incr = collocation_dict[self.obstype]\
                ['dst'].get('file_date_incr', 'm')
        if file_date_incr == 'm':
            tmpdate += relativedelta(months=+1)
        elif file_date_incr == 'Y':
            tmpdate += relativedelta(years=+1)
        elif file_date_incr == 'd':
            tmpdate += timedelta(days=+1)
        else:
            # fix: an unrecognized increment previously looped forever
            raise ValueError('unknown file_date_incr: '
                             + str(file_date_incr))
    return
def write_to_nc(self, pathtofile=None, file_date_incr=None):
    """
    Write insitu time series to netCDF, looping over date increments
    (months by default) from self.sdate to self.edate.

    param:
        pathtofile - explicit target file; if None the path is built
                     from the insitu_dict 'dst' templates for each
                     date increment
        file_date_incr - date increment per output file:
                         'm' month (default), 'Y' year, 'd' day

    return: None
    """
    if 'error' in vars(self):
        print('Erroneous insitu_class file detected')
        print('--> dump to netCDF not possible !')
        return
    tmpdate = self.sdate
    edate = self.edate
    while tmpdate <= edate:
        if pathtofile is None:
            path_template = insitu_dict[self.nID]['dst']\
                ['path_template'][0]
            file_template = insitu_dict[self.nID]['dst']\
                ['file_template']
            strsublst = insitu_dict[self.nID]['dst']['strsub']
            if 'filterData' in vars(self):
                file_template = 'filtered_' + file_template
            tmppath = os.path.join(path_template, file_template)
            # fix: path rebuilt every iteration -- previously the
            # first auto-generated path leaked into later increments
            tmppathtofile = make_pathtofile(tmppath, strsublst,
                                            vars(self), date=tmpdate)
        else:
            tmppathtofile = pathtofile
        title = (self.varalias + ' observations from '
                 + self.nID + ' ' + self.sensor)
        dumptonc_ts_insitu(self, tmppathtofile, title)
        # determine date increment
        # NOTE(review): reads ['src'] here while the sibling
        # write_to_nc variants read ['dst'] -- confirm against the
        # insitu config before unifying
        if file_date_incr is None:
            file_date_incr = insitu_dict[self.nID]\
                ['src'].get('file_date_incr', 'm')
        if file_date_incr == 'm':
            tmpdate += relativedelta(months=+1)
        elif file_date_incr == 'Y':
            tmpdate += relativedelta(years=+1)
        elif file_date_incr == 'd':
            tmpdate += timedelta(days=+1)
        else:
            # fix: an unrecognized increment previously looped forever
            raise ValueError('unknown file_date_incr: '
                             + str(file_date_incr))
    return
def write_to_nc(self, pathtofile=None, file_date_incr=None):
    """
    Write class variables to netcdf files.

    param:
        pathtofile - explicit target file; if None the path is built
                     from the satellite_dict 'dst' templates for each
                     date increment
        file_date_incr - what date increment to use for files:
                         'm' month (default), 'Y' year, 'd' day

    return: None
    """
    if 'error' in vars(self):
        print('Erroneous satellite_class file detected')
        print('--> dump to netCDF not possible !')
        return
    tmpdate = self.sdate
    edate = self.edate
    while tmpdate <= edate:
        if pathtofile is None:
            # build the target path from the config templates;
            # fix: rebuilt every iteration -- previously the first
            # auto-generated path was reused for all later increments
            path_template = satellite_dict[self.product]\
                ['dst']['path_template']
            file_template = satellite_dict[self.product]\
                ['dst']['file_template']
            strsublst = satellite_dict[self.product]['dst']['strsub']
            if 'filterData' in vars(self):
                file_template = 'filtered_' + file_template
            tmppath = os.path.join(path_template, file_template)
            subdict = make_subdict(strsublst,
                                   class_object_dict=vars(self))
            tmppathtofile = make_pathtofile(tmppath, strsublst,
                                            subdict, date=tmpdate)
        else:
            tmppathtofile = pathtofile
        title = (self.obstype + ' observations from ' + self.mission)
        dumptonc_ts_sat(self, tmppathtofile, title)
        # determine date increment
        if file_date_incr is None:
            file_date_incr = satellite_dict[self.product]\
                ['dst'].get('file_date_incr', 'm')
        if file_date_incr == 'm':
            tmpdate += relativedelta(months=+1)
        elif file_date_incr == 'Y':
            tmpdate += relativedelta(years=+1)
        elif file_date_incr == 'd':
            tmpdate += timedelta(days=+1)
        else:
            # fix: an unrecognized increment previously looped forever
            raise ValueError('unknown file_date_incr: '
                             + str(file_date_incr))
    return
def get_local_files(sdate, edate, twin, product,
                    dict_for_sub=None, path_local=None):
    """
    Function to retrieve list of files/paths for available locally
    stored satellite data. This list is used for other functions to
    query and parsing.

    param:
        sdate - start date (datetime object)
        edate - end date (datetime object)
        twin - time window (temporal constraint) in minutes
        product - product as of satellite_specs.yaml
        dict_for_sub - dictionary for substitution in templates
        path_local - a path if defined

    return:
        pathlst - list of paths
        filelst - list of files
    """
    filelst = []
    pathlst = []
    tmpdate = sdate - timedelta(minutes=twin)
    if path_local is None:
        print('path_local is None -> checking config file')
        # walk month by month and collect directory listings;
        # fix: use a local 'tmppath' instead of clobbering the
        # path_local parameter and resetting it to None each pass
        while (tmpdate <= edate + relativedelta(months=+1)):
            try:
                # create local path for each time
                path_template = \
                    satellite_dict[product]['dst'].get('path_template')
                strsublst = \
                    satellite_dict[product]['dst'].get('strsub')
                subdict = \
                    make_subdict(strsublst,
                                 class_object_dict=dict_for_sub)
                tmppath = make_pathtofile(path_template,
                                          strsublst, subdict)
                tmppath = os.path.join(tmppath,
                                       tmpdate.strftime('%Y'),
                                       tmpdate.strftime('%m'))
                print(tmppath)
                if os.path.isdir(tmppath):
                    tmplst = np.sort(os.listdir(tmppath))
                    filelst.append(tmplst)
                    pathlst.append(
                        [os.path.join(tmppath, e) for e in tmplst])
            except Exception as e:
                print(e)
            # hoisted out of try/except: both branches incremented
            tmpdate = tmpdate + relativedelta(months=+1)
        filelst = np.sort(flatten(filelst))
        pathlst = np.sort(flatten(pathlst))
    else:
        filelst = np.sort(os.listdir(path_local))
        pathlst = [os.path.join(path_local, e) for e in filelst]
    # restrict to the requested period widened by +/- twin minutes
    idx_start, _ = check_date(filelst,
                              sdate - timedelta(minutes=twin))
    _, idx_end = check_date(filelst,
                            edate + timedelta(minutes=twin))
    if idx_end == 0:
        # no upper bound found -> keep through the last file
        idx_end = len(pathlst) - 1
    pathlst = np.unique(pathlst[idx_start:idx_end + 1])
    filelst = np.unique(filelst[idx_start:idx_end + 1])
    # fix: dropped redundant int() around len()
    print(str(len(pathlst)) + " valid files found")
    return pathlst, filelst
def get_remote_files_cmems(**kwargs):
    '''
    Download swath files from CMEMS and store them at defined
    location. Time stamps in file name stand for: from, to, creation

    kwargs:
        product - product as of satellite_specs.yaml
        sdate/edate - start/end date (datetime objects)
        twin - time window in minutes (default 30)
        nproc - number of simultaneous downloads (default 1)
        mission - satellite mission (default 's3a')
        path_local - download target; derived from config if None
        dict_for_sub - dictionary for substitution in templates
    '''
    product = kwargs.get('product')
    sdate = kwargs.get('sdate')
    edate = kwargs.get('edate')
    twin = kwargs.get('twin', 30)
    nproc = kwargs.get('nproc', 1)
    mission = kwargs.get('mission', 's3a')
    path_local = kwargs.get('path_local')
    dict_for_sub = kwargs.get('dict_for_sub')
    # credentials
    server = satellite_dict[product]['src']['server']
    user, pw = get_credentials(remoteHostName=server)
    tmpdate = deepcopy(sdate)
    filesort = False
    path_template_src = satellite_dict[product]['src']\
        ['path_template']
    strsublst_src = satellite_dict[product]['src']['strsub']
    subdict_src = make_subdict(strsublst_src,
                               class_object_dict=dict_for_sub)
    while (tmpdate <= edate):
        # create remote path
        path_remote = make_pathtofile(path_template_src,
                                      strsublst_src, subdict_src,
                                      date=tmpdate)
        if path_local is None:
            # create local path
            # NOTE(review): computed only on the first iteration and
            # reused for all months; files are redistributed by
            # sort_files afterwards -- confirm this is intended
            path_template_dst = satellite_dict[product]['dst']\
                ['path_template']
            strsublst_dst = satellite_dict[product]['dst']['strsub']
            subdict_dst = make_subdict(strsublst_dst,
                                       class_object_dict=dict_for_sub)
            path_local = make_pathtofile(path_template_dst,
                                         strsublst_dst, subdict_dst,
                                         date=tmpdate)
            filesort = True
        print('# ----- ')
        print('Chosen source: ')
        print(mission + ' values from ' + product + ': ' + server)
        print(path_remote)
        print('# ----- ')
        # get list of accessable files
        ftp = FTP(server)
        try:
            ftp.login(user, pw)
            ftp.cwd(path_remote)
            content = ftp.nlst()
        finally:
            # fix: connection was previously never closed (one leaked
            # FTP session per month of the loop)
            ftp.close()
        # choose files according to sdate/edate
        tmplst = []
        tmpdate_new = tmpdate - timedelta(minutes=twin)
        tmpdate_end = edate + timedelta(minutes=twin)
        while (tmpdate_new <= tmpdate_end):
            matchingtmp = [s for s in content
                           if tmpdate_new.strftime('%Y%m%dT%H') in s]
            tmplst = tmplst + matchingtmp
            tmpdate_new = tmpdate_new + timedelta(minutes=twin)
        matching = np.unique(tmplst)
        print(matching)
        # check if download path exists if not create
        if not os.path.exists(path_local):
            os.makedirs(path_local, exist_ok=True)
        # Download matching files
        print('Downloading ' + str(len(matching)) + ' files: .... \n')
        print("Used number of possible simultaneous downloads "
              + str(nproc) + "!")
        Parallel(n_jobs=nproc)(
            delayed(tmploop_get_remote_files)(
                i, matching, user, pw, server,
                path_remote, path_local
            ) for i in range(len(matching)))
        # update time: jump to the first day of the next month
        tmpdate = datetime((tmpdate + relativedelta(months=+1)).year,
                           (tmpdate + relativedelta(months=+1)).month,
                           1)
    if filesort is True:
        # sort files
        print("Data is being sorted into subdirectories "
              + "year and month ...")
        filelst = [f for f in os.listdir(path_local)
                   if os.path.isfile(os.path.join(path_local, f))]
        sort_files(path_local, filelst, product, mission)
    print('Files downloaded to: \n', path_local)
def get_remote_files_eumetsat(**kwargs):
    '''
    Download swath files from EUMETSAT and store them at defined
    location. This fct uses the SentinelAPI for queries.

    kwargs:
        product - product as of satellite_specs.yaml
        sdate/edate - start/end date (datetime objects)
        mission - satellite mission (default 's3a')
        path_local - download target; derived from config if None
        dict_for_sub - dictionary for substitution in templates
        api_url - explicit endpoint; otherwise the configured list
                  of endpoints is tried in order
    '''
    import sentinelsat as ss
    product = kwargs.get('product')
    sdate = kwargs.get('sdate')
    edate = kwargs.get('edate')
    mission = kwargs.get('mission', 's3a')
    path_local = kwargs.get('path_local')
    dict_for_sub = kwargs.get('dict_for_sub')
    api_url = kwargs.get('api_url')
    # query interval as ISO-style time strings
    dates = (sdate.strftime('%Y-%m-%dT%H:%M:%SZ'),
             edate.strftime('%Y-%m-%dT%H:%M:%SZ'))
    filesort = False
    if path_local is None:
        # no target given -> derive local path from config templates
        dst_cfg = satellite_dict[product]['dst']
        strsublst = dst_cfg['strsub']
        subdict = make_subdict(strsublst,
                               class_object_dict=dict_for_sub)
        path_local = make_pathtofile(dst_cfg['path_template'],
                                     strsublst, subdict,
                                     date=sdate)
        filesort = True
    query_dict = make_query_dict(product, mission)
    print(query_dict)
    products = None
    if api_url is None:
        # try the configured endpoints until one answers
        for url in satellite_dict[product]['src']['api_url']:
            print('Source:', url)
            try:
                user, pw = get_credentials(remoteHostName=url)
                api = ss.SentinelAPI(user, pw, url)
                products = api.query(area=None, date=dates,
                                     **query_dict)
                break
            except Exception as e:
                print(e)
    else:
        user, pw = get_credentials(remoteHostName=api_url)
        api = ss.SentinelAPI(user, pw, api_url)
        products = api.query(area=None, date=dates, **query_dict)
    if products is None:
        print('No products found!')
    else:
        # make sure the download directory exists
        if not os.path.exists(path_local):
            os.makedirs(path_local, exist_ok=True)
        api.download_all(products, directory_path=path_local)
    if filesort is True:
        # sort files
        print("Data is being sorted into subdirectories "
              + "year and month ...")
        filelst = [f for f in os.listdir(path_local)
                   if os.path.isfile(os.path.join(path_local, f))]
        sort_files(path_local, filelst, product, mission)
    print('Files downloaded to: \n', path_local)