def test_redirect(self):
    """Check that redirect() overwrites an existing log file and prefixes every line."""
    log_path = "test-out.log"  # the log file used by this test

    # start from a clean slate
    if os.path.isfile(log_path):
        os.remove(log_path)

    # pre-populate the file — append=False below must discard this content
    with open(log_path, "w") as handle:
        handle.write("Veni, vidi, vici!\n")
        handle.write("Alea iacta est!")

    streamtologger.redirect(
            target=log_path,
            print_to_screen=False,
            append=False,
            header_format="test> "
    )

    print("line 1")
    print("line 2.1", end="")
    print(", line 2.2")

    # the partial print and its continuation must be merged into one line
    expected = ["test> line 1", "test> line 2.1, line 2.2"]
    self.assertEqual(expected, self._read_file(log_path))

    # remove the artifact created by this test
    os.remove(log_path)
def logger(parameter):
    """Redirect stdout/stderr to "<parameter.name>.log" with a timestamp header.

    Args:
        parameter: any object exposing a ``name`` attribute; the log file
            "<name>.log" is created relative to the current working directory.
    """
    # derive the log file name from the parameter's name attribute
    # (removed dead commented-out prefix-resolution code)
    logname = parameter.name + ".log"
    # every subsequent print() is mirrored into the log file, each line
    # prefixed with a "[YYYY-mm-dd HH:MM:SS] " header
    streamtologger.redirect(target=logname, header_format="[{timestamp:%Y-%m-%d %H:%M:%S}] ")
def redirect_to_file(log_file_path, append=False, format="[{timestamp:%Y-%m-%d %H:%M:%S} - {level:5}] "):
    """Print to log file and stdout simultaneously.

    Use reset_redirect() to only print to stdout again.

    Args:
        log_file_path (str): target log file; missing parent directories are created.
        append (bool): append to an existing log file instead of overwriting it.
        format (str): header prepended to every logged line. (NOTE: the name
            shadows the ``format`` builtin, but renaming would break keyword callers.)
    """
    log_dir = dirname(log_file_path)
    # BUGFIX: dirname() returns "" for a bare file name, and makedirs("")
    # raises FileNotFoundError — only create directories when there are any.
    if log_dir and not exists(log_dir):
        makedirs(log_dir)
    streamtologger.redirect(log_file_path, append=append, header_format=format)
def main(conf: config.Config):
    """Entry point: prepare output dir + logging, obtain the data, and generate datasets.

    Args:
        conf: parsed application configuration (output dir, seed, data path,
            dataset counts, DLV solver path, ...).
    """
    # create the output directory if it does not exist yet
    if not os.path.isdir(conf.output_dir):
        os.mkdir(conf.output_dir)

    # set up logging — everything printed below is also written to the log file
    streamtologger.redirect(
            target=os.path.join(conf.output_dir, LOG_FILE_NAME),
            print_to_screen=not conf.quiet,
            header_format="[{timestamp:%Y-%m-%d %H:%M:%S} - {level:5}] ",
            append=False)

    # print command that was used to run this application
    print("$", APP_NAME, " ".join(sys.argv[1:]))
    print()

    # print the provided configuration
    _print_config(conf)

    # seed RNG if possible (only when the user supplied a seed)
    if conf.seed is not None:
        print("seeding RNG with {}".format(conf.seed))
        random.seed(conf.seed)
        print("OK\n")

    # look for data, and download it if necessary
    print("looking for data...")
    if conf.data is None:
        data_path = os.path.join(conf.output_dir, DATA_FILENAME)
        if os.path.isfile(data_path):
            # reuse a previously downloaded copy inside the output directory
            print("discovered data at '{}'".format(data_path))
        else:
            # fetch the data from the fixed URL (network access required)
            print("downloading data to '{}'...".format(data_path))
            urllib.request.urlretrieve(DATA_URL, data_path)
        conf.data = data_path
    print("OK\n")

    # load the data from disk
    print("loading data from '{}'...".format(conf.data))
    data = _load_data(conf.data)
    print("found data about {} countries".format(len(data)))
    print("OK\n")

    # invoke dataset generator to create the required datasets
    print("generating {} dataset{} with {} training sample{}\n".format(
            conf.num_datasets,
            "s" if conf.num_datasets > 1 else "",
            conf.num_training_samples,
            "s" if conf.num_training_samples > 1 else ""))
    generator = data_gen.DatasetGenerator(data, conf.setting, dlv_solver.DlvSolver(conf.dlv), ONTOLOGY, conf.class_facts)
    generator.generate_datasets(conf.num_datasets, conf.num_training_samples, conf.output_dir)
def run(self) -> None:
    """Launch the training.

    All stdout/stderr output is first redirected to the configured log file
    (nothing is printed to the screen), then the executor is initialized and
    the actual experiment is started.
    """
    # compute the target path of the training log once, for readability
    log_path = os.path.join(self._conf.results_dir, self._conf.training_log)

    # send the entire output to the log file only
    streamtologger.redirect(
            target=log_path,
            print_to_screen=False,
            append=False,
            header_format=expbase.LOG_LINE_HEADER
    )

    self._init()           # run all preparatory actions
    self._run_training()   # launch the actual experiment
def log(self, event=None):
    """Toggle debug logging on or off (menu entry / Ctrl+L handler)."""
    if self.logging:
        # currently logging -> restore the original streams and switch off
        sys.stdout = self.stdout
        sys.stderr = self.stderr
        streamtologger._is_redirected = False
        self.logging = False
        self.options.entryconfig(self.options.index(2), label="Logging: Off (Ctrl+L)")
        messagebox.showinfo(title="Stopped", message="Debug Logging has been disabled.")
    else:
        # currently not logging -> redirect stdout/stderr into nse.log
        streamtologger.redirect(
                target="nse.log",
                header_format="[{timestamp:%Y-%m-%d %H:%M:%S} - {level:5}] ")
        self.logging = True
        self.options.entryconfig(self.options.index(2), label="Logging: On (Ctrl+L)")
        messagebox.showinfo(title="Started", message="Debug Logging has been enabled.")
def main(conf: config.Config):
    """Set up output dir + logging, seed the RNG, echo the config, and run the generator."""
    # make sure the output directory exists before anything is written into it
    if not os.path.isdir(conf.output_dir):
        os.mkdir(conf.output_dir)

    # from here on, all printed output also goes to the log file
    log_target = os.path.join(conf.output_dir, LOG_FILE_NAME)
    streamtologger.redirect(
            log_target,
            print_to_screen=not conf.quiet,
            append=False,
            header_format=LOG_FILE_HEADER
    )

    random.seed(conf.seed)               # seed RNG
    _print_config(conf)                  # print user-defined configuration to screen
    generator.Generator.generate(conf)   # run generator
from smb.SMBConnection import SMBConnection
from nmb.NetBIOS import NetBIOS
import os
import sys
import time
import socket
import hashlib
import random
import string
import pprint
import logging
import requests  # BUGFIX: 'requests' is used below but was never imported —
                 # 'from requests.packages...' does NOT bind the name 'requests'

# INSTALL this package using "pip3 install git+https://github.com/phohenecker/stream-to-logger"
# This helps redirect all print statements to a file for later examination
import streamtologger
streamtologger.redirect(target="./passpr3y_output.txt")

# Get rid of dem warnings, this a gottam hak tool
from requests.packages.urllib3.exceptions import InsecureRequestWarning
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)

# Disable logging from pysmb
logging.getLogger('SMB').setLevel(logging.CRITICAL)
logging.getLogger('NMB').setLevel(logging.CRITICAL)

# Console colors (ANSI escape sequences)
G = '\033[92m'  # green
Y = '\033[93m'  # yellow
B = '\033[94m'  # blue
R = '\033[91m'  # red
W = '\033[0m'   # white
# assemble a unique per-run results directory: "<results_dir>/<dd:mm>_<HH:MM:SS><save_title>"
# NOTE(review): the ":" separators make this name invalid on Windows — presumably Linux-only; confirm
current_run_dir = params.results_dir + "/" + time.strftime(
    "%d:%m") + "_" + time.strftime("%H:%M:%S") + params.save_title
params.current_run_dir = current_run_dir
makedirs(current_run_dir)

# snapshot the source files used for this run next to its results
copy2('models_attention_bottom_separate.py', current_run_dir)
copy2('train_attention_bottom.py', current_run_dir)
copy2('data_attention_bottom.py', current_run_dir)
copy2('eval_attention.py', current_run_dir)

# copy the expl-to-labels model definition into the CWD first, so that the
# import on the next statement picks up exactly that file
copy2(
    os.path.join(params.directory_expl_to_labels, "models_expl_to_labels.py"),
    '.')
from models_expl_to_labels import ExplToLabelsNet
copy2("models_expl_to_labels.py", current_run_dir)

# from here on, everything printed is also written to the run's log file
streamtologger.redirect(target=current_run_dir + '/log.txt')

# set gpu device
torch.cuda.set_device(params.gpu_id)

# print parameters passed, and all parameters
print('\ntogrep : {0}\n'.format(sys.argv[1:]))
print(params)

""" SEED """
# seed every RNG in use so the run is reproducible
np.random.seed(params.seed)
random.seed(params.seed)
torch.manual_seed(params.seed)
torch.cuda.manual_seed(params.seed)
"""
# command-line options for evaluation
parser.add_argument("--eval_batch_size", type=int, default=32)
parser.add_argument("--directory_expl_to_labels", type=str, default='')
parser.add_argument("--state_path_expl_to_labels", type=str, default='')

eval_params = parser.parse_args()

# copy the attention model definition from the run directory into the CWD
# under a fixed name, so the class can be imported on the next statement
if not os.path.exists("copy_models_attention_bottom_separate.py"):
    shutil.copy(
        os.path.join(eval_params.directory, "models_attention_bottom_separate.py"),
        "copy_models_attention_bottom_separate.py")
from copy_models_attention_bottom_separate import eSNLIAttention

# mirror all printed output into a timestamped eval log inside the run directory
streamtologger.redirect(target=os.path.join(
    eval_params.directory,
    time.strftime("%d:%m") + "_" + time.strftime("%H:%M:%S") + 'log_eval.txt'))

# attention model
# restore the trained attention model from its checkpoint and move it to the GPU
state_att = torch.load(
    os.path.join(eval_params.directory, eval_params.state_path))
model_config_att = state_att['config_model']
model_state_dict = state_att['model_state']
att_net = eSNLIAttention(model_config_att).cuda()
att_net.load_state_dict(model_state_dict)

# reuse the training-time params, overridden with the eval-time settings below
params = state_att['params']
assert params.separate_att == eval_params.separate_att, "params.separate_att " + str(
    params.separate_att)
params.word_vec_expl = model_config_att['word_vec']
params.current_run_dir = eval_params.directory
params.eval_batch_size = eval_params.eval_batch_size
parser.add_argument("--gpu", type=int, default=None, help="for printing purposes only")
parser.add_argument("--seed", type=int, default=1234, help="seed")

# saved models paths
parser.add_argument("--directory", type=str, default='')
parser.add_argument("--state_path", type=str, default='')

params = parser.parse_args()
# NOTE(review): reads params.directory_att_model although this snippet only
# defines --directory — presumably added by arguments outside this view; verify
params.current_run_dir = params.directory_att_model

# mirror all printed output into a timestamped eval log inside the run directory
streamtologger.redirect(target=os.path.join(
    params.current_run_dir,
    time.strftime("%d:%m") + "_" + time.strftime("%H:%M:%S") + 'log_eval.txt'))

""" ALL DATA, some will only be needed for eval for we want to build glove vocab once """
# cache key prefix for the preprocessed explanation data
preproc = params.preproc_expl + "_maxtokens_" + str(params.max_tokens) + "_"
train_path = os.path.join('dataset', params.train_set)
train = get_train(train_path, preproc, params.min_freq, params.n_train)
snli_dev = get_dev_test_with_expl(params.esnli_path, 'dev', preproc, params.min_freq)

# build the vocabulary from every explanation that can occur during evaluation
expl_sentences_train = train['expl_1']
word_index_train = get_word_dict(expl_sentences_train)
expl_sentences = train['expl_1'] + snli_dev['expl_1'] + snli_dev[
    'expl_2'] + snli_dev['expl_3']
word_index = get_word_dict(expl_sentences)
#!/usr/local/bin/python3
# coding: utf-8
import sys
import os
import streamtologger

# Redirect stdout/stderr of this script to "<script name>.log", placed next
# to this file when the invocation path carries no directory component.
prefix = os.path.dirname(os.path.realpath(__file__)) + "/"
# BUGFIX: sys.argv[0].split('.')[0] truncates at the FIRST dot, so an
# invocation like "./script.py" yielded an empty name (log file ".log");
# os.path.splitext() strips only the final extension.
logname = os.path.splitext(sys.argv[0])[0] + ".log"
if "/" not in logname:
    logname = prefix + logname
streamtologger.redirect(target=logname, header_format="[{timestamp:%Y-%m-%d %H:%M:%S}] ")
"\n" "THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n" "IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n" "FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n" "AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n" "LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n" "OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n" "SOFTWARE." ) __license__ = "MIT License" __version__ = "2017.1" __date__ = "Jul 05, 2017" __maintainer__ = "Patrick Hohenecker" __email__ = "*****@*****.**" __status__ = "Production" # save stdout/stderr to the file "./out.log" (in addition to the usual printing) streamtologger.redirect(target="./out.log", append=False, header_format="[{timestamp:%Y-%m-%d %H:%M:%S} - {level:5}] ") # a few test prints print("line 1") print("line 2\nline 3.1", end="") print(" line 3.2") print("line 4") # the next line causes an error, which is logged as well x = "not a number" / 2 # have a look at the file ./out.log!!!
# flags controlling which evaluation components are run
parser.add_argument("--train_snli_classif", action='store_true', dest='train_snli_classif')
parser.add_argument("--use_prototype_senteval", action='store_true', dest='use_prototype_senteval')
parser.add_argument("--do_image_caption", action='store_true', dest='do_image_caption')
# store_false: passing this flag DISABLES deterministic cudnn
parser.add_argument("--cudnn_nondeterministic", action='store_false', dest='cudnn_deterministic')

eval_params = parser.parse_args()

# mirror all printed output into the run directory's eval log
streamtologger.redirect(target=eval_params.directory + '/log_eval.txt')

# restore the trained model's checkpoint
state = torch.load(os.path.join(eval_params.directory, eval_params.state_path))
model_config = state['config_model']
model_state_dict = state['model_state']

# reuse the training-time params, overridden with the eval-time settings below
params = state['params']
params.eval_batch_size = eval_params.eval_batch_size
params.current_run_dir = eval_params.directory
params.train_snli_classif = eval_params.train_snli_classif
params.use_prototype_senteval = eval_params.use_prototype_senteval
params.do_image_caption = eval_params.do_image_caption
params.cudnn_deterministic = eval_params.cudnn_deterministic

"""
SEED
"THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n" "IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n" "FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n" "AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n" "LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n" "OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n" "SOFTWARE.") __license__ = "MIT License" __version__ = "2017.1" __date__ = "Jul 05, 2017" __maintainer__ = "Patrick Hohenecker" __email__ = "*****@*****.**" __status__ = "Production" # save stdout/stderr to the file "./out.log" (in addition to the usual printing) streamtologger.redirect( target="./out.log", append=False, header_format="[{timestamp:%Y-%m-%d %H:%M:%S} - {level:5}] ") # a few test prints print("line 1") print("line 2\nline 3.1", end="") print(" line 3.2") print("line 4") # the next line causes an error, which is logged as well x = "not a number" / 2 # have a look at the file ./out.log!!!
import streamtologger
import logging

# truncate any previous log file, then funnel both the print streams
# (via streamtologger) and the logging module into the same file
log_file = "log_test.log"
open(log_file, "w").close()

streamtologger.redirect(target=log_file, print_to_screen=False)
# NOTE(review): basicConfig(filemode='w') reopens and truncates log_file
# AFTER redirect() attached to it, so anything streamtologger wrote before
# this point is lost — confirm whether that interplay is intended.
logging.basicConfig(
    filename=log_file,
    filemode='w',
    level=logging.DEBUG,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')

a = 5
b = 0
logging.info(msg=f"\nA= {a}\nB= {b}\n")

try:
    c = a / b
except ZeroDivisionError:
    # BUGFIX: catch the specific error instead of a broad `except Exception`,
    # which would also swallow unrelated failures; the unused `as e` is dropped
    logging.error("Division by 0 test", exc_info=True)