def epoch_start(self, network):
    self.index = 0
    self.last_train_error = None
    self.bar = progressbar.NullBar()

    if network.verbose and self.n_batches >= 2:
        self.bar = iters.make_progressbar(self.n_batches, show_output=True)
        self.bar.update(0)
def create_progressbar(*args, **kwargs):
    # Progress bars go to stdout by default
    if 'fd' not in kwargs:
        kwargs['fd'] = sys.stdout

    if FLAGS.show_progressbar:
        return progressbar.ProgressBar(*args, **kwargs)
    return progressbar.NullBar(*args, **kwargs)
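# A minimal usage sketch for the factory above (standalone; FLAGS and the
# per-record work are hypothetical). Because progressbar2's NullBar exposes
# the same interface as ProgressBar, the caller can wrap an iterable without
# re-checking the verbosity flag itself -- a null-object pattern.
for record in create_progressbar()(range(1000)):
    pass  # placeholder for real per-record work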
def test_key_extraction(self):
    empty = np.empty(0)
    estimates = np.array([[253, 175, 146, 139, 39, 24, 40, 196, 38],
                          [162, 7, 17, 31, 86, 200, 98, 60, 166],
                          [16, 181, 1, 53, 76, 95, 58, 44, 17],
                          [48, 0, 86, 35, 156, 15, 97, 135, 201],
                          [14, 44, 55, 24, 226, 225, 128, 116, 29],
                          [16, 181, 1, 53, 76, 95, 58, 44, 17],
                          [253, 175, 146, 139, 39, 24, 40, 196, 38],
                          [162, 7, 17, 31, 86, 200, 98, 60, 166]])

    # bit_matrix is unused
    sa1 = sa.SA(empty, empty, empty, empty, TestSaUnit.KEYS, TestSaUnit.PLAIN)

    single_result = sa1.key_extraction(estimates, TestSaUnit.TRACES, 0,
                                       progressbar.NullBar(),
                                       num_attack_traces=5)
    three_results = sa1.key_extraction(estimates, TestSaUnit.TRACES, 0,
                                       progressbar.NullBar(),
                                       num_attack_traces=5,
                                       top_n_guesses=3)

    self.assertEqual(single_result, 66)
    self.assertTrue(np.array_equal(three_results, [66, 76, 124]))
def create_progress_bar(self, dynamic_msg=None):
    widgets = [
        ' [batch ', progressbar.SimpleProgress(), '] ',
        progressbar.Bar(),
        ' (', progressbar.ETA(), ') ',
    ]
    if dynamic_msg is not None:
        widgets.append(progressbar.DynamicMessage(dynamic_msg))

    if self.config_global.get('show_progress', True):
        return progressbar.ProgressBar(widgets=widgets)
    else:
        return progressbar.NullBar()
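# A hedged, standalone sketch of the DynamicMessage widget used above: it
# reserves a named slot in the bar that each update() call fills through a
# keyword argument of the same name (the 'loss' name here is illustrative).
import progressbar

widgets = [progressbar.Bar(), ' ', progressbar.DynamicMessage('loss')]
with progressbar.ProgressBar(widgets=widgets, max_value=5) as bar:
    for i in range(5):
        bar.update(i + 1, loss=1.0 / (i + 1))  # rendered by DynamicMessage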
def test_subkey_out_of_range(self):
    """Tests whether an error is thrown when a subkey is requested that
    does not exist. A key is composed of 16 subkeys."""
    traces = np.load('data/traces.npy')
    keys = np.load('data/key.npy')
    plain = np.load('data/plain.npy')

    sa_obj = sa.SA(traces[:4000], keys[:4000], plain[:4000],
                   traces[4000:4030], keys[4000:4030], plain[4000:4030])
    bar = progressbar.NullBar()

    self.assertRaises(
        ValueError,
        lambda: sa_obj.solve_subkey(16, False, bar, 1, 15,
                                    num_traces=3999, hamming_weight=True))
def predict(self, path, threshold=0.5):
    """Predict the labels for a single file or directory of files.

    Keyword arguments:
    path -- The path to the file(s) that we'll be evaluating.
    """
    # Is this a single file, or a directory?
    screenshots = []
    if os.path.isfile(path):
        screenshots = [path]
    elif os.path.isdir(path):
        screenshots = os.listdir(path)
        screenshots = [os.path.join(path, s) for s in screenshots]
    else:
        raise FileNotFoundError(path)

    results = []
    bar = progressbar.ProgressBar()
    if self.quiet:
        bar = progressbar.NullBar()

    for screenshot in bar(screenshots):
        # Load the image into memory
        img = None
        try:
            img = tf.keras.preprocessing.image.load_img(
                screenshot,
                target_size=(self.image_width, self.image_height))
            img = tf.keras.preprocessing.image.img_to_array(img)
            img = np.expand_dims(img, axis=0)
            img = preprocess_input(img)
        except IsADirectoryError:
            print("\nWARN: Skipping directory: ", screenshot)
            continue
        except OSError:
            print("\nWARN: Skipping empty or corrupt file: ", screenshot)
            continue

        prediction = self.model.predict(img, batch_size=1)

        result = dict()
        result["filename"] = screenshot
        result["custom404"] = prediction[0][0]
        result["login"] = prediction[0][1]
        result["homepage"] = prediction[0][2]
        result["oldlooking"] = prediction[0][3]
        results.append(result)

    return results
def test_solve_single_subkey(self):
    """Tests whether the solve_subkey method returns correctly while
    using the hamming_weight leakage model."""
    traces = np.load('data/traces.npy')
    keys = np.load('data/key.npy')
    plain = np.load('data/plain.npy')

    sa_obj = sa.SA(traces[:4000], keys[:4000], plain[:4000],
                   traces[4000:4030], keys[4000:4030], plain[4000:4030])
    sa_obj.log_handler = LogHandler('test_sa', False)
    bar = progressbar.NullBar()

    # In our testing set 126 is the second subkey.
    self.assertEqual(
        sa_obj.solve_subkey(1, False, bar, 1, 15,
                            num_traces=4000, hamming_weight=True),
        126)
def null_bar(iterator, *args, text=None, **kwargs):
    """Generator that gives the default muted progress bar for the project.

    It only logs the optional text.

    Args:
        iterator (iterator): Iterator of items to use the bar with.
        text (str): Text to log describing the current operation.

    Returns:
        generator object: Item handled by the progress bar.
    """
    # log text immediately
    if text:
        logger.info(text)

    # create null progress bar
    with progressbar.NullBar(*args, **kwargs) as progress:
        for item in progress(iterator):
            yield item
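# Hypothetical usage sketch for the wrapper above: the loop body runs with
# no visual progress output, but the operation is still recorded in the log.
for item in null_bar(range(3), text="Processing items"):
    pass  # placeholder for real per-item work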
def progressbar(cls, quiet=False, redirect_stderr=True, redirect_stdout=True,
                **pbar_options):
    if quiet or cls.config.quiet:
        return progressbar.NullBar(redirect_stderr=redirect_stderr,
                                   redirect_stdout=redirect_stdout,
                                   **pbar_options)
    return progressbar.ProgressBar(redirect_stderr=redirect_stderr,
                                   redirect_stdout=redirect_stdout,
                                   **pbar_options)
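# A standalone sketch of why the factory above defaults to redirecting the
# streams: with redirect_stdout=True, print() calls issued mid-loop are
# buffered and re-emitted above the bar instead of corrupting its line.
import time
import progressbar

with progressbar.ProgressBar(redirect_stdout=True, max_value=10) as bar:
    for i in range(10):
        print('processing item', i)  # shown cleanly above the bar
        time.sleep(0.1)
        bar.update(i + 1)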
def uploadFile(self, typeId, filePath, waitForTranscode=True,
               progressBars=True, md5=None, section=None):
    if md5 is None:
        md5 = md5_sum(filePath)

    upload_uid = str(uuid1())
    upload_gid = str(uuid1())
    fname = os.path.basename(filePath)
    if section is None:
        section = "New Files"

    found = self.byMd5(md5)
    if found:
        print(f"File with {md5} found in db ({found['name']})")
        return False

    tus = TusClient(self.tusURL)
    chunk_size = 1 * 1024 * 1024  # 1 MB
    uploader = tus.uploader(filePath, chunk_size=chunk_size)
    num_chunks = math.ceil(uploader.file_size / chunk_size)

    if progressBars:
        bar = progressbar.ProgressBar(prefix="Upload", redirect_stdout=True)
    else:
        bar = progressbar.NullBar()

    for _ in bar(range(num_chunks)):
        uploader.upload_chunk()

    # Initiate transcode.
    out = requests.post(self.url + '/Transcode' + "/" + self.project,
                        headers=self.headers,
                        json={
                            'type': typeId,
                            'uid': upload_uid,
                            'gid': upload_gid,
                            'url': uploader.url,
                            'name': fname,
                            'section': section,
                            'md5': md5,
                        })
    try:
        print("{}, {}".format(fname, out.json()['message']))
        out.raise_for_status()
    except Exception:
        print("Error: '{}'".format(out.text))
        return False

    if waitForTranscode:
        # Poll for the media being created every 5 seconds
        if progressBars:
            bar = progressbar.ProgressBar(prefix="Transcode",
                                          redirect_stdout=True)
        else:
            bar = progressbar.NullBar()

        # Check quickly for the first half second, then go slow
        for i in bar(count()):
            if i % 2 == 0:
                media = self.byMd5(md5)
                if media:
                    bar.finish()
                    break
            else:
                if i < 20:
                    time.sleep(0.1)
                else:
                    print("Waiting for transcode...")
                    time.sleep(2.5)

    return True
def apply_batches(function, inputs, batch_size, show_progressbar=False,
                  show_output=False, average_outputs=False):
    """
    Splits inputs into mini-batches and passes them to the function.
    Function returns list of outputs, or the average loss in case
    ``average_outputs=True``.

    Parameters
    ----------
    function : func
        Function that accepts one or more positional inputs.
        Each of them should be an array-like variable that
        has exactly the same number of rows.

    inputs : tuple, list
        The arguments that will be provided to the function specified
        in the ``function`` argument.

    batch_size : int
        Mini-batch size. Defines the maximum number of samples that
        will be used as an input to the ``function``.

    show_progressbar : bool
        When ``True``, a progress bar will be shown in the terminal.
        Defaults to ``False``.

    show_output : bool
        Assumes that the function outputs errors. The ``True`` value
        will show this information in the progressbar. The error shown
        relates to the last processed batch. Defaults to ``False``.

    average_outputs : bool
        Output from each batch will be combined into a single average.
        This option assumes that the loss was calculated per batch.
        Defaults to ``False``.

    Returns
    -------
    list
        List of function outputs.
    """
    n_samples = count_samples(inputs)
    batch_size = n_samples if batch_size is None else batch_size

    n_batches = count_minibatches(inputs, batch_size)
    bar = progressbar.NullBar()

    if show_progressbar and n_batches >= 2:
        bar = make_progressbar(n_batches, show_output)
        bar.update(0)  # triggers empty progressbar

    outputs = []
    iterator = minibatches(inputs, batch_size, shuffle=False)

    for i, sliced_inputs in enumerate(iterator):
        output = function(*as_tuple(sliced_inputs))
        outputs.append(output)

        kwargs = dict(loss=output) if show_output else {}
        bar.update(i, **kwargs)

    # Clean progressbar from the screen
    bar.fd.write('\r' + ' ' * bar.term_width + '\r')

    if average_outputs:
        # When loss is calculated per batch separately, it might be
        # necessary to combine the errors into a single value
        return average_batch_errors(outputs, n_samples, batch_size)

    return outputs
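# Hypothetical usage sketch for apply_batches above: average a toy
# mean-squared error over mini-batches of random data (the "model" is a
# stand-in; all names below are illustrative).
import numpy as np

x = np.random.randn(1000, 10)
y = np.random.randn(1000, 1)

def mse(x_batch, y_batch):
    prediction = x_batch.mean(axis=1, keepdims=True)  # stand-in model
    return float(np.mean((prediction - y_batch) ** 2))

loss = apply_batches(mse, [x, y], batch_size=128,
                     show_progressbar=True, show_output=True,
                     average_outputs=True)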
def apply_batches(function, arguments, batch_size, description='',
                  show_progressbar=False, show_error_output=True):
    """
    Apply batches to a specified function.

    Parameters
    ----------
    function : func
        Function that accepts one or more positional arguments.
        Each of them should be an array-like variable that
        has exactly the same number of rows.

    arguments : tuple, list
        The arguments that will be provided to the function specified
        in the ``function`` argument.

    batch_size : int
        Mini-batch size.

    description : str
        Short description that will be displayed near the progressbar
        in verbose mode. Defaults to ``''`` (empty string).

    show_progressbar : bool
        ``True`` means that the function will show a progressbar in
        the terminal. Defaults to ``False``.

    show_error_output : bool
        Assumes that the function outputs errors. ``True`` will show
        this information in the progressbar. The error shown relates
        to the last processed batch. Defaults to ``True``.

    Returns
    -------
    list
        List of function outputs.
    """
    if not arguments:
        raise ValueError("The argument parameter should be a list or "
                         "tuple with at least one element.")

    samples = arguments[0]
    n_samples = len(samples)
    batch_iterator = list(iter_batches(n_samples, batch_size))

    if show_progressbar:
        widgets = [
            progressbar.Timer(format='Time: %(elapsed)s'),
            ' |',
            progressbar.Percentage(),
            progressbar.Bar(),
            ' ',
            progressbar.ETA(),
        ]

        if show_error_output:
            widgets.extend([' | ', progressbar.DynamicMessage('error')])

        bar = progressbar.ProgressBar(
            widgets=widgets,
            max_value=len(batch_iterator),
            poll_interval=0.1,
        )
        bar.update(0)
    else:
        bar = progressbar.NullBar()

    outputs = []
    for i, batch in enumerate(batch_iterator):
        sliced_arguments = [argument[batch] for argument in arguments]
        output = function(*sliced_arguments)
        outputs.append(output)

        if show_error_output:
            bar.update(i, error=np.atleast_1d(output).item(0))
        else:
            bar.update(i)

    bar.fd.write('\r' + ' ' * bar.term_width + '\r')
    return outputs
def create_progress_bar(self, iterations):
    return progressbar.NullBar(max_value=iterations)
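# Even though it renders nothing, progressbar2's NullBar subclasses
# ProgressBar and accepts the usual constructor arguments, so passing
# max_value above is harmless. A minimal standalone sketch: the no-op bar
# tolerates the same calls a real bar would.
import progressbar

bar = progressbar.NullBar(max_value=100)
for i in bar(range(100)):
    pass  # no output is produced, but iteration completes normally
bar.finish()  # also a no-op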
def uploadFile(self, typeId, filePath, waitForTranscode=True,
               progressBars=True, md5=None, section=None, fname=None):
    """ Upload a new file to Tator """
    if md5 is None:
        md5 = md5_sum(filePath)

    upload_uid = str(uuid1())
    upload_gid = str(uuid1())
    if fname is None:
        fname = os.path.basename(filePath)
    if section is None:
        section = "New Files"

    tus = TusClient(self.tusURL)
    chunk_size = 100 * 1024 * 1024  # 100 MB
    uploader = tus.uploader(filePath, chunk_size=chunk_size,
                            retries=10, retry_delay=15)
    num_chunks = math.ceil(uploader.get_file_size() / chunk_size)

    if progressBars:
        bar = progressbar.ProgressBar(prefix="Upload", redirect_stdout=True)
    else:
        bar = progressbar.NullBar()

    for _ in bar(range(num_chunks)):
        uploader.upload_chunk()

    mediaType = self.mediaTypeApi.get(typeId)
    if mediaType['dtype'] == 'video':
        endpoint = 'Transcode'
    else:
        endpoint = 'SaveImage'

    # Initiate transcode.
    out = requests.post(f'{self.url}/{endpoint}/{self.project}',
                        headers=self.headers,
                        json={
                            'type': typeId,
                            'uid': upload_uid,
                            'gid': upload_gid,
                            'url': uploader.url,
                            'name': fname,
                            'section': section,
                            'md5': md5,
                        })
    try:
        print("{}, {}".format(fname, out.json()['message']))
        out.raise_for_status()
    except Exception:
        print("Error: '{}'".format(out.text))
        return False

    if waitForTranscode and endpoint == 'Transcode':
        # Poll for the media being created every 5 seconds
        if progressBars:
            bar = progressbar.ProgressBar(prefix="Transcode",
                                          redirect_stdout=True)
        else:
            bar = progressbar.NullBar()

        # Check quickly for the first half second, then go slow
        for i in bar(count()):
            if i % 2 == 0:
                media = self.byMd5(md5)
                if media:
                    bar.finish()
                    break
            else:
                if i < 20:
                    time.sleep(0.1)
                else:
                    print("Waiting for transcode...")
                    time.sleep(2.5)

        # Return the media id if we waited for it
        return media['id']

    # Return true for async calls
    # TODO: Make this less hokey
    return True