def upload_file(self, remote_dir, local_dir, local_file):
    """Upload ``local_dir/local_file`` to ``remote_dir`` over FTP.

    Opens a fresh FTP connection, creates the remote directory chain
    when it does not exist yet, stores the file under the same name,
    and closes the connection.

    :param remote_dir: target directory path on the FTP server.
    :param local_dir: local directory containing the file.
    :param local_file: file name to upload (also the remote file name).
    """
    self.ftp = self.ftpconnect()
    self.ftp.timeout = 600  # large uploads can exceed the connect timeout
    bufsize = 1024
    with open(os.path.join(local_dir, local_file), 'rb') as fp:
        try:
            self.ftp.cwd(remote_dir)
        except Exception:
            # Directory does not exist: create every cumulative prefix
            # of the path ("a", "a/b", "a/b/c", ...), tolerating
            # "550 ... File exists" for prefixes already present.
            dirpath = remote_dir.replace('\\', '/')
            prefixes = []
            for part in dirpath.split('/'):
                if not prefixes:
                    prefixes.append(part)
                else:
                    prefixes.append(prefixes[-1] + '/' + part)
            success = False
            last_error = Exception
            for prefix in prefixes:
                try:
                    self.ftp.mkd(prefix)
                    success = True
                except Exception as e:
                    last_error = e
                    e_str = str(e)
                    if '550' in e_str and 'File exists' in e_str:
                        continue
            # If no mkd succeeded at all, the path could not be created.
            if not success:
                raise last_error
            logger.warn('create dir succeed {}'.format(remote_dir))
            self.ftp.cwd(remote_dir)
        self.ftp.storbinary('STOR ' + local_file, fp, bufsize)
    self.close()
def csv_to_xy(r, x_name, y_name, scale_dict, x_bound=None, x_start=None,
              y_bound=None, remove_outlier=False):
    """Extract an (x, y) curve from an experiment's progress table.

    :param r: object exposing a pandas DataFrame as ``r.progress``.
    :param x_name: column used for the x axis.
    :param y_name: column used for the y axis; values are passed through
        ``scale_dict[y_name]`` before being returned.
    :param scale_dict: mapping from column name to a scaling callable.
    :param x_bound: keep rows with x <= x_bound (default: ``x.max()``).
    :param x_start: keep rows with x >= x_start (default: ``x.min()``).
    :param y_bound: if given, clip y values above this bound.
    :param remove_outlier: drop rows whose y z-score is >= 10.
    :return: ``(x, y)`` Series pair; ``([], [])`` for a missing table, or
        ``None`` when ``y_name`` is not a column (historical
        inconsistency kept for backward compatibility).
    """
    df = r.progress.copy().reset_index()
    if df is None:
        logger.warn("empty df!")
        return [], []
    if y_name not in list(df.columns):
        return None
    # Drop rows where either coordinate is NaN.
    df.drop(df[np.isnan(df[x_name])].index, inplace=True)
    df.drop(df[np.isnan(df[y_name])].index, inplace=True)
    x = df[x_name]
    y = df[y_name]
    if x_bound is None:
        x_bound = x.max()
    if x_start is None:
        x_start = x.min()
    keep = (x <= x_bound) & (x >= x_start)
    x = x[keep]
    y = y[keep]
    if y_bound is not None:
        # clip() produces the same values as the original
        # ``y[y > y_bound] = y_bound`` without the chained-assignment
        # (SettingWithCopy) in-place mutation of a filtered Series.
        y = y.clip(upper=y_bound)
    if remove_outlier:
        # NOTE(review): one-sided filter — only large positive z-scores
        # are removed; presumably intentional, confirm before changing.
        z_score = (y - y.mean()) / y.std()
        keep = z_score < 10.0
        x = x[keep]
        y = y[keep]
    y = scale_dict[y_name](y)
    return x, y
def ftpconnect(self):
    """Open an FTP connection with the stored credentials and return it."""
    connection = FTP()
    connection.set_debuglevel(0)  # suppress protocol debug output
    connection.connect(self.ftp_server, 21, timeout=60)
    connection.login(self.username, self.password)
    logger.warn("login succeed")
    return connection
def upload_file(self, remote_dir, local_dir, local_file):
    """Upload ``local_dir/local_file`` to ``remote_dir`` over SFTP.

    Opens a fresh SFTP connection, creates the remote directory chain
    when it does not exist yet, uploads the file, and closes the
    connection.

    :param remote_dir: target directory path on the SFTP server.
    :param local_dir: local directory containing the file.
    :param local_file: file name to upload.
    """
    self.sftp = self.sftpconnect()
    try:
        self.sftp.cwd(remote_dir)
    except Exception:
        # Directory does not exist: create every cumulative prefix of
        # the path ("a", "a/b", "a/b/c", ...), tolerating
        # "550 ... File exists" for prefixes already present.
        dirpath = remote_dir.replace('\\', '/')
        prefixes = []
        for part in dirpath.split('/'):
            if not prefixes:
                prefixes.append(part)
            else:
                prefixes.append(prefixes[-1] + '/' + part)
        success = False
        last_error = Exception
        for prefix in prefixes:
            try:
                self.sftp.mkdir(prefix)
                success = True
            except Exception as e:
                last_error = e
                e_str = str(e)
                if '550' in e_str and 'File exists' in e_str:
                    continue
        # If no mkdir succeeded at all, the path could not be created.
        if not success:
            raise last_error
        logger.warn('create dir succeed {}'.format(remote_dir))
        self.sftp.cwd(remote_dir)
    # os.path.join inserts the separator the original string
    # concatenation silently omitted (consistent with the FTP uploader).
    self.sftp.put(os.path.join(local_dir, local_file))
    self.close()
def all_file_search(self, root_path, files, filter_length):
    """Recursively collect file paths under ``root_path`` via FTP.

    Appends each discovered file path (with the first ``filter_length``
    characters stripped) to ``files`` in place.

    :param root_path: directory (or file) path to scan on the server.
    :param files: output list mutated in place.
    :param filter_length: length of the common prefix to strip.
    """
    all_files = self.ftp.nlst(root_path)
    # The original ``is not []`` compared identity against a fresh list
    # and was always true; compare contents so empty listings fail fast.
    assert all_files != []
    if len(all_files) == 1:
        try:
            # size() succeeds only for regular files, so a single entry
            # with a size is treated as a file and recorded.
            assert self.ftp.size(all_files[0]) is not None
            files.append(all_files[0][filter_length:])
            return
        except Exception:
            # Best-effort: log and fall through to recurse instead.
            logger.warn("WARNING in all file {}".format(all_files))
            logger.warn(traceback.format_exc())
    for f in all_files:
        self.all_file_search(f, files, filter_length)
def log_file_copy(self, source_tester):
    """Replace this tester's checkpoint, results, and log directories
    with copies taken from ``source_tester``, then re-init the logger.

    :param source_tester: a ``Tester`` instance to copy from.
    """
    assert isinstance(source_tester, Tester)
    # The checkpoint directory is always replaced unconditionally.
    shutil.rmtree(self.checkpoint_dir)
    shutil.copytree(source_tester.checkpoint_dir, self.checkpoint_dir)
    optional_dirs = [
        (source_tester.results_dir, self.results_dir,
         "[load warning]: can not find results dir"),
        (source_tester.log_dir, self.log_dir,
         "[load warning]: can not find log dir"),
    ]
    # Results and log dirs are copied only when the source has them.
    for src_dir, dst_dir, missing_msg in optional_dirs:
        if os.path.exists(src_dir):
            shutil.rmtree(dst_dir)
            shutil.copytree(src_dir, dst_dir)
        else:
            logger.warn(missing_msg)
    self._init_logger()
def import_hyper_parameters(task_name, record_date):
    """
    return the hyper parameters of the experiment in task_name/record_date,
    which is stored in Tester.
    :param task_name:
    :param record_date:
    :return:
    """
    logger.warn(
        "the function is deprecated. please check the ExperimentLoader as the new implementation"
    )
    global tester
    assert isinstance(tester, Tester)
    # Reload the tester recorded at task_name/record_date and expose its
    # hyper-parameters as attribute-style access.
    loaded = tester.load_tester(record_date, task_name, tester.root)
    return argparse.Namespace(**loaded.hyper_param)
def all_file_search(self, root_path, files, filter_length):
    """Recursively collect file paths under ``root_path`` via SFTP.

    Appends each discovered file path (with the first ``filter_length``
    characters stripped) to ``files`` in place.

    :param root_path: directory path to scan on the server.
    :param files: output list mutated in place.
    :param filter_length: length of the common prefix to strip.
    """
    if root_path[-1] != '/':
        root_path += '/'
    all_files = [root_path + x for x in self.sftp.listdir(root_path)]
    # The original ``is not []`` compared identity against a fresh list
    # and was always true; compare contents so empty listings fail fast.
    assert all_files != []
    if len(all_files) == 1:
        try:
            # NOTE(review): stat() also succeeds for directories, so a
            # directory with one entry is recorded as a file — confirm
            # this matches the intended layout before changing.
            assert self.sftp.stat(all_files[0]).st_size is not None
            files.append(all_files[0][filter_length:])
            return
        except Exception:
            # Best-effort: log and fall through to recurse instead.
            logger.warn("WARNING in all file {}".format(all_files))
            logger.warn(traceback.format_exc())
    for f in all_files:
        if self.sftp.isdir(f):
            self.all_file_search(f, files, filter_length)
def add_summary_to_logger(self, summary, name='', simple_val=False, freq=20):
    """
    [deprecated] see RLA.logger.log_from_tf_summary

    Record a serialized TensorFlow Summary either into the tabular
    logger (simple_val=True) or into the TensorBoard writer.

    :param summary: serialized ``Summary`` protobuf bytes.
    :param name: key used both for tabular prefixes and for
        per-name deduplication bookkeeping.
    :param simple_val: when True, extract scalar values and record them
        via ``logger.record_tabular`` instead of writing to TensorBoard.
    :param freq: when > 0, write at most once per ``freq`` time steps
        per ``name``; when <= 0, write every call.
    """
    logger.warn(
        "add_summary_to_logger is deprecated. See RLA.logger.log_from_tf_summary."
    )
    # Skip entirely when tensorboard output is disabled in the config.
    if "tensorboard" not in self.private_config["LOG_USED"]:
        logger.info("skip adding summary to tb")
        return
    # Track which (name, bucket) pairs were already written.
    if name not in self.summary_add_dict:
        self.summary_add_dict[name] = []
    if freq > 0:
        # Bucket the current time step so only one write per bucket.
        summary_ts = int(self.time_step_holder.get_time() / freq)
    else:
        summary_ts = 0
    if freq <= 0 or summary_ts not in self.summary_add_dict[name]:
        from tensorflow.core.framework import summary_pb2
        summ = summary_pb2.Summary()
        summ.ParseFromString(summary)
        if simple_val:
            list_field = summ.ListFields()

            # Walk the protobuf field tree and record every leaf that
            # carries a scalar ``simple_value``.
            def recursion_util(inp_field):
                if hasattr(inp_field, "__getitem__"):
                    for inp in inp_field:
                        recursion_util(inp)
                elif hasattr(inp_field, 'simple_value'):
                    logger.record_tabular(name + '/' + inp_field.tag, inp_field.simple_value)
                else:
                    pass
            recursion_util(list_field)
            logger.dump_tabular()
        else:
            # Forward the raw summary to TensorBoard at the current step.
            self.writer.add_summary(summary, self.time_step_holder.get_time())
            self.writer.flush()
        # Mark this bucket as written for this name.
        self.summary_add_dict[name].append(summary_ts)
def sftpconnect(self):
    """Open an SFTP connection with the stored credentials and return it."""
    connection = pysftp.Connection(
        self.sftp_server,
        username=self.username,
        password=self.password,
    )
    logger.warn("login succeed")
    return connection