def setup_data_files(setup_args_):
    user_conf_dir = f'{os.path.expanduser("~")}/.pyg2p/'
    fm.create_dir(user_conf_dir)
    list_files = {
        t: [os.path.join(t, f) for f in os.listdir(t) if f.endswith('.json')]
        for t in ('./templates', './configuration', './configuration/global')
    }
    for_user_to_copy = [
        f for f in list_files['./configuration']
        if not fm.exists(os.path.join(user_conf_dir, fm.filename(f)))
    ]
    templates_to_copy = [
        f for f in list_files['./templates']
        if not fm.exists(os.path.join(user_conf_dir, 'templates_samples', fm.filename(f)))
    ]
    data_files = [('pyg2p/configuration/', list_files['./configuration/global'])]
    if for_user_to_copy:
        data_files.append((user_conf_dir, for_user_to_copy))
    if templates_to_copy:
        data_files.append((os.path.join(user_conf_dir, 'templates_samples'), templates_to_copy))
    setup_args_.update({'data_files': data_files})
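# Usage sketch (illustrative, not part of the original sources): setup_data_files
# mutates the setuptools keyword dict in place, so a setup.py could wire it in
# roughly as below. The package metadata here is a placeholder.
if __name__ == '__main__':
    from setuptools import setup, find_packages

    setup_args = {'name': 'pyg2p', 'packages': find_packages()}
    setup_data_files(setup_args)  # adds the computed 'data_files' entry
    setup(**setup_args)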
def merge_with_user_conf(self):
    # Overwrites global config with user values. If no user config file is found,
    # an empty one is created (seeded with init_dict when provided).
    if not file_util.exists(self.config_file):
        self.user_vars = {'description': self.description}
        if self.init_dict:
            self.user_vars.update(self.init_dict)
        self.dump()
    else:
        self.user_vars = self._load()
    self.vars.update(self.user_vars)
def check_write(self):
    if not self.data_path:
        # the user has not defined a custom data folder for geopotentials
        raise ApplicationException.get_exc(NO_VAR_DEFINED, self.data_path_var)
    if not file_util.exists(self.data_path, is_folder=True):
        raise ApplicationException.get_exc(NOT_EXISTING_PATH, self.data_path)
    if not file_util.can_write(self.data_path):
        raise ApplicationException.get_exc(NO_WRITE_PERMISSIONS, self.data_path)
def __init__(self):
    super().__init__()
    self.vars = {}
    # always create the user folder ~/.pyg2p
    if not file_util.exists(self.config_dir, is_folder=True):
        file_util.create_dir(self.config_dir)
    for f in os.listdir(self.config_dir):
        filepath = os.path.join(self.config_dir, f)
        # read all custom paths from ~/.pyg2p/*.conf files
        if file_util.is_conf(filepath):
            self.vars.update(self.load_properties(filepath))
    self.geopotentials_path = self.get(self.geopotentials_path_var)
    self.intertables_path = self.get(self.intertables_path_var)
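# Illustrative sketch only: load_properties is not shown in this excerpt. Given that
# its return value feeds self.vars through dict.update, a plausible reading is a
# plain key=value parser over each ~/.pyg2p/*.conf file; the real implementation
# may differ.
def _load_properties_sketch(filepath):
    props = {}
    with open(filepath) as conf_file:
        for line in conf_file:
            line = line.strip()
            if not line or line.startswith('#'):
                continue  # skip blank lines and comments
            key, _, value = line.partition('=')
            props[key.strip()] = value.strip()
    return props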
def get_filepath(self, grid_id, additional=None):
    filename = self.vars.get(grid_id)
    path = None
    if not filename:
        raise ApplicationException.get_exc(NO_GEOPOTENTIAL, grid_id)
    for folder in (additional, self.data_path, self.global_data_path):
        if folder and file_util.exists(os.path.join(folder, filename)):
            path = os.path.join(folder, filename)
            break
    if not path:
        additional = additional or "-G option not issued"
        raise ApplicationException.get_exc(
            NO_FILE_GEOPOTENTIAL,
            details=f'id:{grid_id}, filename:{filename}, '
                    f'Searched in: {self.data_path}, {self.global_data_path}, {additional}')
    self._log(f'USING GEOFILE {path}', 'INFO')
    return path
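# Usage sketch (illustrative): the lookup order is the optional -G folder, then the
# user data_path, then the global data path. 'geo_conf' and the argument names are
# placeholders for the configuration object and a real grid identifier.
def _resolve_geopotential(geo_conf, grid_id, extra_folder=None):
    # raises an ApplicationException if the id is unknown or the file is missing everywhere
    return geo_conf.get_filepath(grid_id, additional=extra_folder)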
def check_conf(self):
    # log all files in the intertables and geopotentials paths that are not
    # referenced by the configuration
    used_intertables = [i['filename'] for i in self.intertables.vars.values()]
    used_geopotentials = self.geopotentials.vars.values()

    intertables_folder_content = file_util.ls(self.intertables.data_path, 'npy')
    intertables_global_folder_content = file_util.ls(self.intertables.global_data_path, 'npy')
    geopotentials_folder_content = file_util.ls(self.geopotentials.data_path, 'npy')
    geopotentials_global_folder_content = file_util.ls(self.geopotentials.global_data_path, 'npy')

    for f in itertools.chain(intertables_folder_content, intertables_global_folder_content):
        if file_util.filename(f) not in used_intertables:
            self._log(f'Intertable file is not in configuration: {f} - You could delete it')

    for f in itertools.chain(geopotentials_folder_content, geopotentials_global_folder_content):
        if file_util.filename(f) not in used_geopotentials:
            self._log(f'Geopotential file is not in configuration: {f} - You could delete it')

    # drop intertable entries whose files are no longer on disk
    user_intertables = deepcopy(self.intertables.user_vars)
    for k, i in user_intertables.items():
        fullpath = os.path.join(self.intertables.data_path, i['filename'])
        if not file_util.exists(fullpath):
            self._log(f'{fullpath} - Non existing. Removing item from intertables.json')
            del self.intertables.user_vars[k]
    self.intertables.dump()
def download_data(self, dataset):
    remote_path = dataset
    local_path = getattr(self.user, f'{dataset}_path')
    client = FTP(*self.ftp.access)
    self._log(f'=== Start downloading {remote_path} files to {local_path}', level='INFO')
    client.cwd(os.path.join(self.ftp.folder, remote_path))
    filenames = client.nlst()
    numfiles = len(filenames)
    for i, f in enumerate(filenames):
        if f in ('.', '..', 'readme.txt'):
            continue
        local_filename = os.path.join(local_path, f)
        if file_util.exists(local_filename):
            self._log(f'[{i}/{numfiles}] Skipping existing file {f}', level='INFO')
            continue
        self._log(f'[{i}/{numfiles}] Downloading {f}')
        with open(local_filename, 'wb') as local_file:
            client.retrbinary(f'RETR {f}', local_file.write)
    self._log(f'=== Download finished: {remote_path}')
    client.quit()  # close the FTP connection
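# Illustrative sketch: self.ftp.access is unpacked into ftplib.FTP, whose signature
# is FTP(host, user='', passwd='', ...). Host, credentials and folder names are
# placeholders, not the project's real FTP endpoint.
from ftplib import FTP

def _list_remote_dataset(host, user, passwd, base_folder, dataset):
    client = FTP(host, user, passwd)        # same call shape as FTP(*self.ftp.access)
    client.cwd(f'{base_folder}/{dataset}')  # move into the remote dataset folder
    names = client.nlst()                   # remote file names, as used by download_data
    client.quit()
    return names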