def run_iperf(self, dst):
    eprint("\tRunning iperf on {} ...".format(dst))
    self.receiver.start_iperf_server()
    time.sleep(0.1)
    result = self.sender.run_iperf(dst=dst, duration=30)
    self.receiver.stop_iperf_server()
    return result
def run(self): eprint("Testing native network") results = { "meta": self.meta, "native": self.run_suite(self.receiver.private_ip) } for crypto in CRYPTO: eprint("Running with crypto {}".format(crypto)) self.start_vpncloud(crypto=crypto) res = self.run_suite(self.receiver_ip_vpncloud) self.stop_vpncloud() results[str(crypto)] = res results['results'] = { "throughput_mbits": dict([(k, results[k]["iperf"]["throughput"] / 1000000.0) for k in ["native"] + CRYPTO]), "latency_us": dict([(k, dict([ (str(s), (results[k]["ping_%s" % s]["rtt_avg"] - results["native"]["ping_%s" % s]["rtt_avg"]) * 1000.0 / 2.0) for s in [100, 500, 1000] ])) for k in CRYPTO]) } return results
def model_instance(self, instance, history_index):
    """
    Runs all the necessary steps to model a given AWS instance type and write the results to ES
    :param instance:
    :param history_index:
    :return:
    """
    eprint("Fetching data for: {}".format(instance))
    instance_history = history_index.search_terms({"InstanceType": instance})
    instance_history = ({
        "Region": h.get("Region"),
        "Date": pandas.to_datetime(
            dateutil.parser.parse(h.get("Timestamp")).strftime("%Y-%m-%d")),
        "OS": h.get("ProductDescription").lower(),
        "Price": float(h.get("SpotPrice")),
        "AvailabilityZone": h.get("AvailabilityZone")
    } for h in instance_history)
    instance_az_history = self.split_data(instance_history)
    for region in instance_az_history:
        for os in instance_az_history[region]:
            self.model_data(instance, region, os,
                            instance_az_history[region][os])
def load_config(): """ Load plugin configurations. :return: a namespace containing the configurations :rtype: SimpleNamespace """ # load with open(sys.argv[1], 'r') as f: plugin_config = yaml.safe_load(f) placeholders = plugin_config.get('placeholders') placeholders_file = plugin_config.get('placeholdersFile') resource_selectors = plugin_config.get('resourceSelectors', []) # validation validation_fail = False if not placeholders_file and not placeholders: c.eprint('One of placeholders or placeholdersFile is required') validation_fail = True if validation_fail: raise Exception() return SimpleNamespace( placeholders=placeholders, placeholders_file=placeholders_file, resource_selectors=resource_selectors )
def modify_scheduled_animation(self, schedule_entry):
    with self.__schedule_lock:
        job_found = False
        for i in range(0, len(self.__schedule_table)):
            if self.__schedule_table[i].JOB_ID == schedule_entry.JOB_ID:
                job_found = True
                # modify the arguments
                self.__animation_scheduler.modify_job(
                    schedule_entry.JOB_ID,
                    args=(schedule_entry.ANIMATION_SETTINGS, ))
                # and also reschedule it
                self.__animation_scheduler.reschedule_job(
                    schedule_entry.JOB_ID,
                    trigger=CronTrigger(
                        year=schedule_entry.CRON_STRUCTURE.YEAR,
                        month=schedule_entry.CRON_STRUCTURE.MONTH,
                        day=schedule_entry.CRON_STRUCTURE.DAY,
                        week=schedule_entry.CRON_STRUCTURE.WEEK,
                        day_of_week=schedule_entry.CRON_STRUCTURE.DAY_OF_WEEK,
                        hour=schedule_entry.CRON_STRUCTURE.HOUR,
                        minute=schedule_entry.CRON_STRUCTURE.MINUTE,
                        second=schedule_entry.CRON_STRUCTURE.SECOND))
                # replace the schedule table entry
                self.__schedule_table[i] = schedule_entry
                break

        if not job_found:
            eprint("No scheduled animation with ID '%s' found!" %
                   str(schedule_entry.JOB_ID))
        else:
            # save the modified table
            self.__save_schedule_table()
def clobber_disk(device):
    """Reset drive"""
    common.eprint("DELETING PARTITIONS.")
    device.clobber()
    disk = parted.freshDisk(device, "gpt")
    disk.commit()
    return disk
def can_tri(mask: np.ndarray, a: Tuple[int, int], b: Tuple[int, int],
            c: Tuple[int, int]) -> bool:
    # TODO: Reimplement, LOL
    # Clip the triangle's bounding box to the mask dimensions.
    minr = max(min(a[0], b[0], c[0]), 0)
    minc = max(min(a[1], b[1], c[1]), 0)
    maxr = min(max(a[0], b[0], c[0]) + 1, mask.shape[0])
    maxc = min(max(a[1], b[1], c[1]) + 1, mask.shape[1])
    R = maxr - minr
    C = maxc - minc
    if R <= 0 or C <= 0:
        eprint("What is going on?")

    # Rasterize the triangle into a local buffer, with coordinates shifted into the box.
    temp = np.zeros([R, C], np.uint8)
    cv2.fillPoly(
        temp,
        np.array([[[a[1] - minc, a[0] - minr],
                   [b[1] - minc, b[0] - minr],
                   [c[1] - minc, c[0] - minr]]], 'int32'), 255)
    #cv2.line(temp, (a[1] - minc, a[0] - minr), (b[1] - minc, b[0] - minr), 255, 1)

    # Set all road pixels to 0 as well
    submask = mask[minr:maxr, minc:maxc]
    if temp.shape[0] != submask.shape[0] or temp.shape[1] != submask.shape[1]:
        eprint("Something went really wrong...")
    temp[submask > 0] = 0

    # If any is found, then there is a collision
    if cv2.findNonZero(temp) is not None:
        return False
    return True
def get_resource(url, expected_sha256=None, patches=None):
    with urlopen(url) as f:
        data = f.read()

    if expected_sha256:
        sha256 = hashlib.sha256()
        sha256.update(data)
        actual_sha256 = sha256.hexdigest()
        if expected_sha256 != actual_sha256:
            c.eprint("sha256 checksum validation failed for", url)
            c.eprint("expected:", expected_sha256)
            c.eprint("actual:  ", actual_sha256)
            raise Exception()

    if patches:
        for patch in patches:
            with tempfile.NamedTemporaryFile() as data_f:
                data_f.write(data)
                data_f.flush()
                try:
                    proc = subprocess.run(
                        ['patch', '--ignore-whitespace', '--output=-',
                         data_f.name, patch],
                        stdout=subprocess.PIPE,
                        stderr=subprocess.PIPE,
                        encoding='utf8',
                        check=True,
                    )
                    data = proc.stdout.encode()
                except subprocess.CalledProcessError as e:
                    c.eprint(e.stderr)
                    c.eprint('failed to apply patch %s with exit status %s' %
                             (patch, e.returncode))
                    raise e

    return list(yaml.safe_load_all(data))
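# Usage sketch for get_resource() above (illustrative only; the manifest content and
# the local file:// URL are made up). urlopen() accepts file:// URLs, so this
# exercises the download, sha256 check and YAML parsing without a network or patches.
import hashlib
import pathlib
import tempfile

_raw = b"kind: ConfigMap\nmetadata:\n  name: example\n"
with tempfile.NamedTemporaryFile("wb", suffix=".yaml", delete=False) as _f:
    _f.write(_raw)

_docs = get_resource(pathlib.Path(_f.name).as_uri(),
                     expected_sha256=hashlib.sha256(_raw).hexdigest())
assert _docs[0]["metadata"]["name"] == "example"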
def load_config(): """ Load plugin configurations. :return: a namespace containing the configurations :rtype: SimpleNamespace """ # load with open(sys.argv[1], 'r') as f: plugin_config = yaml.safe_load(f) resources = plugin_config.get('resources', []) # validation validation_fail = False if not resources: c.eprint('resources is required') validation_fail = True for resource in resources: if 'url' not in resource: c.eprint('resources.url is required') validation_fail = True break if validation_fail: raise Exception() return SimpleNamespace( resources=resources )
def get_bid(self, region, instance, os, duration):
    """
    Gets the bid from ElasticSearch for the given parameters
    :param region: AWS region name
    :param instance: AWS instance type
    :param os: operating system / product description
    :param duration: requested duration in days (clamped to the modelled range)
    :return: Pair [az, bid] - returns [None, -1] on not found/error
    """
    if duration < 1:
        duration = 1
    bid = self.__bid_index.get_doc(self.get_bid_es_key(region, instance, os))
    if bid is None:
        return [None, -1]
    else:
        summary = bid.get("summary")
        n_days = len(summary)
        if duration > n_days:
            duration = n_days
        if self.n_days != n_days:
            self.n_days = n_days
        result = summary[duration - 1].split('/')
        if len(result) != 2:
            eprint("Internal Error - Malformed Bid data for request: {}, {}, {}, {}"
                   .format(region, instance, os, duration))
            return [None, -1]
        else:
            return result
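# Shape of a bid document as implied by the parsing in get_bid() above (the values
# below are made up for illustration): 'summary' holds one "az/price" string per
# modelled day, so a duration of N days maps to summary[N - 1].
_example_bid_doc = {
    "summary": ["us-east-1a/0.031", "us-east-1a/0.034", "us-east-1b/0.036"],
}
_az, _price = _example_bid_doc["summary"][2 - 1].split("/")  # duration = 2 days
assert [_az, _price] == ["us-east-1a", "0.034"]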
def processFile(infile, outfile, url, apiKey, snooze, skip_precheck):
    """
    Process the input file - a list of email addresses to validate. Write results to outfile.
    Two-pass approach. First pass checks the file is readable and contains email addresses.
    Second pass calls validation.

    :param infile:
    :param outfile:
    :param url: str
    :param apiKey: str
    :param snooze: int
    :param skip_precheck: bool
    """
    if infile.seekable() and not skip_precheck:
        # Check & report syntactically-OK & bad email addresses before we start API-based validation, if we can
        f = csv.reader(infile)
        count_ok, count_bad = 0, 0
        for r in f:
            if len(r) == 1:
                recip = r[0]
                try:
                    validate_email(recip, check_deliverability=False)
                    count_ok += 1
                except EmailNotValidError as e:
                    # email is not valid, exception message is human-readable
                    eprint(f.line_num, recip, str(e))
                    count_bad += 1
            else:
                count_bad += 1
        eprint('Scanned input {}, contains {} syntactically OK and {} bad addresses. Validating with SparkPost..'
               .format(infile.name, count_ok, count_bad))
        infile.seek(0)
    else:
        eprint('Skipping input file syntax pre-check. Validating with SparkPost..')

    f = csv.reader(infile)
    fList = ['email', 'valid', 'result', 'reason', 'is_role', 'is_disposable',
             'is_free', 'did_you_mean']
    fh = csv.DictWriter(outfile, fieldnames=fList, restval='', extrasaction='ignore')
    fh.writeheader()
    for r in f:
        recip = r[0]
        res = validateRecipient(url, apiKey, recip, snooze)
        if res and 'results' in res:
            row = res['results']
            row['email'] = recip
            fh.writerow(row)
        else:
            eprint('Error: response', res)
    infile.close()
    outfile.close()
    eprint('Done')
def get_sessions(logdir, user):
    user_sessions_dir = f"{logdir}/{user}-sessions"
    if not exists(user_sessions_dir) or \
            stat(user_sessions_dir).st_ctime < \
            stat(join(logdir, str(user))).st_mtime:
        eprint(f"Refreshing sessions for user {user} from logfile")
        split_sessions(str(user))
    return [f for f in listdir(user_sessions_dir)]
def is_animation_running(self, animation_name):
    try:
        # get the animation
        animation = self.__all_animations[animation_name]
    except KeyError:
        eprint("The animation '%s' could not be found!" % animation_name)
        return False

    return animation.is_running()
def cvc4_objective_to_str(config, obj):
    """Yields CVC4's command to optimize a goal."""
    if config.ignore_objs:
        direction = "minimize" if obj.minimize else "maximize"
        yield "; objectives are not supported\n"
        yield "; ({} {})\n".format(direction, obj.term)
    else:
        common.eprint("error: objectives are not supported")
        sys.exit(1)
def stop(self, timeout=None):
    # wait until the server is fully started
    if self.__server_started.wait(timeout=timeout):
        # shut it down
        self.__server.shutdown()
        self.__server.server_close()
        # mark the server as stopped
        self.__server_started.clear()
    else:
        eprint("Failed to shutdown 'WSGIRefServer'! Timeout was reached.")
def start_vpncloud(self, crypto=None):
    eprint("\tSetting up vpncloud on receiver")
    self.receiver.start_vpncloud(
        crypto=crypto, ip="{}/24".format(self.receiver_ip_vpncloud))
    eprint("\tSetting up vpncloud on sender")
    self.sender.start_vpncloud(
        crypto=crypto,
        peers=["{}:3210".format(self.receiver.private_ip)],
        ip="{}/24".format(self.sender_ip_vpncloud))
    time.sleep(1.0)
def add_dynamic_variant(self, file_name, file_content):
    """
    This method adds a new variant to the animation.

    @param file_name: The name of the file.
    @param file_content: An open file(-like) object that can be processed by the animation.
    """
    # error handling
    if not self.accepts_dynamic_variant:
        eprint("This animation does not support adding of new variants.")
        return

    self._add_dynamic_variant(file_name, file_content)
def save(self):
    if self.__config_file_path is not None:
        output = StringIO()

        for section_name, section in Config:
            # section heading
            print("[%s]" % section_name, file=output)
            # section description
            if section.description:
                print(section.description, file=output)

            for option_name, option in section:
                # option description
                if option.description:
                    print(option.description, file=output)

                # option value
                value = self.get(option)
                # check for an unset value
                if value == configparser._UNSET:
                    print("{} =".format(option_name), file=output)
                else:
                    # dicts and lists must be converted to JSON
                    if isinstance(value, (dict, list)):
                        value = json.dumps(value)
                    # preserve multiline strings -> starting from the second line indentation is needed
                    if (option.value_type == str and "\n" in value):
                        value = "\n ".join(value.split("\n"))
                    print("{} = {}".format(option_name, value), file=output)

                print(file=output)

            print(file=output)
            print(file=output)

        def write_config():
            with open(self.__config_file_path, "w+") as f:
                output.seek(0)
                shutil.copyfileobj(output, f)

        if alpine.is_alpine_linux():
            with alpine_rw():
                write_config()
        else:
            write_config()
    else:
        eprint("This configuration object can't be saved!")
def remove_dynamic_variant(self, variant):
    """
    This method removes a variant from the animation.

    @param variant: A variant of the animation that should exist in the animation_variants enum.
    """
    # error handling
    if not self.accepts_dynamic_variant:
        eprint("This animation does not support removing of variants.")
        return

    for v in self.animation_variants:
        if (v.name == variant.name and
                v.value == variant.value):
            self._remove_dynamic_variant(variant)
            return

    eprint(f"The variant '{variant.name}' could not be found.")
def _add_dynamic_variant(self, file_name, file_content):
    # error handling
    if file_name.rsplit(".", 1)[-1].lower() != "blm":
        eprint("The new variant file must be a blm-file!")
        return

    file_path = str((_ANIMATIONS_DIR / file_name).resolve())

    def write_file():
        with open(file_path, "wb+") as f:
            f.write(file_content.read())

    if is_alpine_linux():
        with alpine_rw():
            write_file()
    else:
        write_file()
def __init__(self, data_generator):
    # Init a predictor
    eprint("Converting data to DataFrame")
    self._int_df = pd.DataFrame(data_generator)
    eprint("Setting Target")
    self._target_price = self._int_df['SpotPrice']
    eprint("Pre-Processing")
    self._pre_process_train_data()
    eprint("Training")
    self.train()
def remove_scheduled_animation(self, schedule_id):
    with self.__schedule_lock:
        job_found = False
        for i in range(0, len(self.__schedule_table)):
            if self.__schedule_table[i].JOB_ID == schedule_id:
                job_found = True
                self.__animation_scheduler.remove_job(schedule_id)
                # remove the entry from the table
                del self.__schedule_table[i]
                break

        if not job_found:
            eprint("No scheduled animation with ID '%s' found!" % str(schedule_id))
        else:
            # save the modified table
            self.__save_schedule_table()
def model_data(self, instance, region, os, data):
    """
    Models data for a given set of input parameters and writes results to ES
    :param instance:
    :param region:
    :param os:
    :param data: list of fields (dictionary), fields: Date, Price
    :return: None
    """
    estimates = {}
    for az in data:
        estimates[az] = []
        for days in range(1, self.n_days + 1):
            estimates[az].append(self.predict(data[az], days))
        if max(estimates[az]) == 999:
            estimates.pop(az)
        else:
            estimates[az] = [999 if isnan(x) else x for x in estimates[az]]

    summary = []
    az_max = {}
    for day in range(0, self.n_days):
        min_az = None
        min_price = -1
        for az in estimates:
            az_max[az] = max(az_max.get(az, -1), estimates[az][day])
            if min_az is None or az_max[az] < min_price:
                min_az = az
                min_price = az_max[az]
        if min_az is not None:
            summary.append("{}/{}".format(min_az, min_price))
        else:
            summary.append("{}/{}".format(min_az, 999))

    if len(estimates) > 0:
        estimates["summary"] = summary
        eprint("Trained: {}, {}, {}".format(region, instance, os))
        self.__bid_index.write(estimates,
                               self.get_bid_es_key(region, instance, os))
def parse_dotenv(filename):
    """
    Load and parse a file in the dotenv format. Exit on error.

    :param str filename: the path to the file to load
    :return: a dict of key/value loaded from the file
    :rtype: dict
    """
    values = {}
    linenum = 0
    try:
        with open(filename, 'r') as f:
            for line in f:
                line = line.rstrip('\n')
                linenum += 1
                if line and not line.startswith('#'):
                    key, value = line.split('=', 1)
                    values[key] = value
    except ValueError:
        c.eprint('%s: line %s: invalid format' % (filename, linenum))
        raise Exception()
    return values
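# Example of the dotenv format parse_dotenv() above accepts (a sketch with made-up
# keys): blank lines and lines starting with '#' are skipped, everything else must be
# KEY=VALUE, and only the first '=' separates the key from the value.
import tempfile

_dotenv_text = ("# database settings\n"
                "DB_HOST=localhost\n"
                "DB_DSN=postgres://user:pass@localhost/app?sslmode=disable\n")

with tempfile.NamedTemporaryFile("w", suffix=".env", delete=False) as _f:
    _f.write(_dotenv_text)

_values = parse_dotenv(_f.name)
assert _values["DB_HOST"] == "localhost"
assert _values["DB_DSN"].endswith("sslmode=disable")  # a value may itself contain '='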
def _add_dynamic_variant(self, file_name, file_content):
    # error handling
    if file_name.rsplit(".", 1)[-1].lower() != "zip":
        eprint("The new variant file must be a zip-file!")
        return

    with ZipFile(file_content, "r") as zip_file:
        info = zip_file.infolist()
        if len(info) > 0:
            def extract_zip():
                # check if the root element is a single directory
                if (len(info) > 1 and info[0].is_dir()):
                    # extract the zip-file directly
                    extract_path = _ANIMATIONS_DIR.resolve()
                    if (_ANIMATIONS_DIR / info[0].filename.rsplit(".", 1)[0]).exists():
                        eprint(f"The variant '{info[0].filename.rsplit('.', 1)[0]}' already exists.")
                        return
                else:
                    # if not, try to create a directory with the file name
                    extract_path = (_ANIMATIONS_DIR / file_name.rsplit(".", 1)[0]).resolve()
                    if extract_path.exists():
                        eprint(f"The variant '{extract_path.name}' already exists.")
                        return
                    else:
                        extract_path.mkdir(parents=True)

                # extract the zip file
                zip_file.extractall(path=str(extract_path))

            if is_alpine_linux():
                with alpine_rw():
                    extract_zip()
            else:
                extract_zip()
        else:
            eprint("The zip-file was empty.")
def run(self):
    while not self._stop_event.is_set():
        start_time = time.time()

        if not self._pause_event.is_set():
            # if the animation is still marked as paused, unset it here
            if self.__animation_paused.is_set():
                # also notifies the resume method that now the animation is running again
                self.__animation_paused.clear()

            # add the next frame to the frame queue
            try:
                more = self.render_next_frame()
            except Exception as e:
                eprint("During the execution of the animation the following error occurred:")
                eprint(repr(e))
                break

            # check if the animation has finished
            if not more:
                # check for more iterations
                if self.is_next_iteration():
                    # decrease iteration count
                    self.__remaining_repeat -= 1
                    # start a new iteration
                    continue
                else:
                    # stop here
                    break
        else:
            # notify the pause method that the animation is now paused
            self.__animation_paused.set()

        # limit fps = animation_speed - the execution time
        wait_time = self.__animation_speed - (time.time() - start_time)
        if wait_time > 0:
            self._stop_event.wait(wait_time)

    # now the animation has stopped, so call the finish callable
    self.__on_finish_callable()
def __enter__(self):
    if not is_alpine_linux():
        eprint("Not running on Alpine Linux. So no changes to the filesystem will be made.")
        return

    if "LBU_MEDIA" not in _ALPINE_LBU_CONF:
        eprint("No Alpine Linux diskless installation detected. So no changes to the filesystem will be made.")
        return

    if not self.__is_root():
        raise RuntimeError(
            "To enable disk write access on Alpine Linux in diskless mode root permissions are necessary!")

    self.__mount_target = str(_ALPINE_MEDIA_DIR / _ALPINE_LBU_CONF["LBU_MEDIA"])

    # remount root filesystem rw
    self.__remount(self.__mount_target, ro=False)
def __start_animation(self, event):
    try:
        # get the new animation
        animation = self.__all_animations[
            event.event_settings.animation_settings.animation_name]
    except KeyError:
        eprint("The animation '%s' could not be found!" %
               event.event_settings.animation_settings.animation_name)
    else:
        # create the animation thread instance
        animation_thread = animation.create_animation(
            event.event_settings.animation_settings)

        # check if the current animation should be paused
        if (event.event_settings.pause_current_animation
                and self.__current_animation is not None):
            # if so, pause it and add it to the pause stack
            animation_to_pause = self.__current_animation
            paused_thread = animation_to_pause.pause_animation()
            if paused_thread is not None:
                self.__pause_queue.put((animation_to_pause, paused_thread))
        else:
            # if an animation should be started without pausing the current one,
            # clear the pause queue, because afterwards no resuming should be done
            if not self.__pause_queue.empty():
                # force clearing of pause queue
                self.__pause_queue.queue.clear()

            # stop any currently running animation
            self.__stop_animation()

        # start it
        animation.start_animation(animation_thread)
        self.__current_animation = animation

        # special: set the thread (for blocking until animation has finished)
        event.start_animation_thread = animation_thread
def parse_value(self, value):
    if isinstance(value, self.value_type):
        return value
    elif (isinstance(value, (dict, list)) and self.value_type == str):
        # convert it to a JSON string
        return json.dumps(value)
    elif (isinstance(value, str) and self.value_type in (dict, list)):
        # load it from a JSON string
        try:
            parsed_v = json.loads(value)
        except ValueError:
            eprint("Parameter '%s' could not be parsed! Is it valid JSON?" % self.name)
            parsed_v = self.value_type()
        return parsed_v
    elif self.value_type == bool:
        return bool(strtobool(value))
    else:
        try:
            return self.value_type(value)
        except ValueError:
            raise ValueError("The value '%s' is not of type '%s'!" %
                             (str(value), str(self.value_type)))
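# Standalone illustration of the conversions parse_value() above performs (it does not
# call the method itself, since its defining class is not shown here): dict/list values
# become JSON strings for str-typed options, JSON strings become dicts/lists, and
# strings like "yes"/"no" become booleans via strtobool.
import json
from distutils.util import strtobool

assert json.dumps({"red": 255}) == '{"red": 255}'   # dict value, str-typed option
assert json.loads('[1, 2, 3]') == [1, 2, 3]         # str value, list-typed option
assert bool(strtobool("yes")) is True               # str value, bool-typed option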