def get_list_dates_for_dataset(product_code, sub_product_code, version, start_date=None, end_date=None):
    """Return the list of dates (as '%Y%m%d' strings) covered by a dataset.

    The product's frequency and date format are read from the product table,
    and the frequency object expands the [start_date, end_date] range into
    individual dates.

    Args:
        product_code: product code of the dataset.
        sub_product_code: sub-product code of the dataset.
        version: product version string.
        start_date: first date of the range (YYYYMMDD int/str), or None.
        end_date: last date of the range; None or '' means 'up to today'.

    Returns:
        List of date strings formatted '%Y%m%d', or None when neither
        start_date nor end_date is provided.
    """
    # Guard clause: with no date bound at all there is nothing to enumerate.
    if start_date is None and end_date is None:
        return None

    # Get the frequency and date format from the product table.
    product_info = querydb.get_product_out_info(productcode=product_code,
                                                subproductcode=sub_product_code,
                                                version=version)
    frequency_id = product_info[0].frequency_id
    dateformat = product_info[0].date_format

    cDataset = datasets.Dataset(product_code, sub_product_code, '', version=version)
    cFrequency = cDataset.get_frequency(frequency_id, dateformat)

    # Build the list of dates; an open-ended range stops at today.
    date_start = cFrequency.extract_date(str(start_date))
    if end_date is not None and end_date != '':
        date_end = cFrequency.extract_date(str(end_date))
    else:
        date_end = datetime.date.today()

    return cFrequency.get_internet_dates(cFrequency.get_dates(date_start, date_end), '%Y%m%d')
def create_permanently_missing_for_dataset(product_code, sub_product_code, version, mapset_code, start_date=None, end_date=None):
    """Return the list of dates for a dataset's requested range.

    NOTE(review): despite its name, this function currently only returns the
    date list for the range — `missing_filenames` is computed but never used,
    and `mapset_code` is ignored, so the "create permanently missing" part of
    the implementation appears to be unfinished. TODO: confirm intent.

    Args:
        product_code: product code of the dataset.
        sub_product_code: sub-product code of the dataset.
        version: product version string.
        mapset_code: mapset code (currently unused — see note above).
        start_date: first date of the range (YYYYMMDD int/str), or None.
        end_date: last date of the range; None or '' means 'up to today'.

    Returns:
        List of date strings formatted '%Y%m%d', or None when neither
        start_date nor end_date is provided.
    """
    # Query the missing-file info for the dataset. The call is kept in case
    # it has side effects, but its result is unused (see docstring note).
    product = products.Product(product_code, version=version)
    missing_filenames = product.get_missing_filenames({'product': product_code,
                                                       'version': version})

    # The date-range expansion below was a verbatim copy of
    # get_list_dates_for_dataset(); delegate instead of duplicating it.
    return get_list_dates_for_dataset(product_code, sub_product_code, version,
                                      start_date=start_date, end_date=end_date)
def syncGeoserver():
    """Copy 'relevant' datasets to GeoServer.

    Selection of datasets is done on the basis of the product.geoserver
    table: for each activated (product, subproduct, version) entry the files
    in the requested date range are uploaded and registered in GeoServer.

    Returns:
        None. Works entirely by side effect (GeoServer REST calls, logging).
    """
    # Get list of all 'relevant' subproducts from the product.geoserver table.
    list_active_geoserver = esTools.get_activated_geoserver()

    # Loop over existing sub_products
    for geoserver_sprod in list_active_geoserver:
        # Extract local variables
        my_prod = geoserver_sprod.productcode
        my_subprod = geoserver_sprod.subproductcode
        my_version = geoserver_sprod.version
        start_date = geoserver_sprod.startdate
        end_date = geoserver_sprod.enddate

        logger.info("Working on Product/Subproduct/Version: {0}/{1}/{2}".format(my_prod, my_subprod, my_version))

        # Manage dates from bigint to datetime; invalid/unset bounds become None.
        if functions.is_date_yyyymmdd(str(start_date), silent=True):
            date_start = datetime.datetime.strptime(str(start_date), '%Y%m%d').date()
        else:
            date_start = None
        if functions.is_date_yyyymmdd(str(end_date), silent=True):
            date_end = datetime.datetime.strptime(str(end_date), '%Y%m%d').date()
        else:
            date_end = None

        # Get additional products info
        product_info = querydb.get_product_out_info(productcode=my_prod,
                                                    subproductcode=my_subprod,
                                                    version=my_version)
        my_type = product_info[0].product_type
        my_category = product_info[0].category_id

        # Create a Product object (to get mapsets)
        my_product = products.Product(my_prod, version=my_version)
        my_mapsets = my_product.mapsets
        if len(my_mapsets) > 1:
            logger.info('More than 1 mapset exists. Take the first')
        if len(my_mapsets) == 0:
            logger.warning('No any mapset exists. Skip.')
            continue
        my_mapset = my_mapsets[0]

        # Create a Dataset object (to get file list).
        # If date_start is not set (e.g. for 10davg prod) create w/o dates.
        if date_start:
            my_dataset = datasets.Dataset(my_prod, my_subprod, my_mapset, version=my_version,
                                          from_date=date_start, to_date=date_end)
            # MMDD products have no year, so a date range is meaningless here.
            if my_dataset._frequency.dateformat == 'MMDD':
                logger.warning('Product of type MMDD: date specification not supported. Skip.')
                continue
            file_list = my_dataset.get_filenames_range()
        else:
            my_dataset = datasets.Dataset(my_prod, my_subprod, my_mapset, version=my_version)
            file_list = my_dataset.get_filenames()

        # Check that there is at least 1 file
        if len(file_list) > 0:
            # Check the Workspace exists, or create it
            my_workspace = esTools.setWorkspaceName(my_category, my_prod, my_subprod, my_version, my_mapset,
                                                    nameType=geoserverREST.geoserverWorkspaceName)
            if not geoserverREST.isWorkspace(my_workspace):
                geoserverREST.createWorkspace(my_workspace)

            # Loop over files and upload
            for my_file in file_list:
                my_date = functions.get_date_from_path_full(my_file)
                logger.debug("Working on Product/Subproduct/Version/Mapset/Date: {0}/{1}/{2}/{3}/{4}".format(
                    my_prod, my_subprod, my_version, my_mapset, my_date))
                # Upload the file and register
                esTools.uploadAndRegisterRaster(my_category, my_prod, my_subprod, my_version, my_mapset,
                                                my_date, my_type, local_data_dir)