def try_for_each_union_member(
        members: Sequence[Type[Any]],
        try_get: Callable[[Type[Any]], Result[str, T]]) -> Result[str, T]:
    """Try ``try_get`` on each union member, returning the first success.

    Each member is invoked at most once: failure reasons are collected
    during the first pass, so the error path no longer re-runs
    ``try_get`` a second time per member (the original recomputed every
    result just to build the ``reasons`` string).

    Returns:
        Ok wrapping the first successful value, or Err aggregating every
        member's failure reason.
    """
    reasons = []
    for member in members:
        res = try_get(member)
        if res.is_ok():
            return Ok(res.unwrap())
        reasons.append(as_err(res))
    joined = "\n".join(reasons)
    return Err(f"Failed on all union members. Failures:\n{joined}")
def has_mechanisms(
        self, cb: Callable[[GCGlobalMechanisms], bool]) -> FailableBool:
    """Apply ``cb`` to this trace GC's global mechanisms.

    Returns Err when ``GlobalHeapHistory`` is absent, otherwise Ok
    wrapping the callback's boolean verdict.
    """
    history = self.trace_gc.GlobalHeapHistory
    if history is None:
        return Err("null GlobalHeapHistory")
    mechanisms = GCGlobalMechanisms(history.GlobalMechanisms)
    return Ok(cb(mechanisms))
def set_action_(self, action: V) -> Result[None, ValueError]:
    '''
    Sets the action for this observation.

    NOTE(review): the docstring previously said "next_state", but this
    method assigns ``self.action``; corrected to match the code.
    Any ValueError raised by the assignment is re-wrapped in Err,
    mirroring the sibling setters.
    '''
    try:
        self.action = action
        return Ok(None)
    except ValueError as error:
        return Err(ValueError(error))
def get_next_state(self, **kwargs: Any) -> Result[U, ValueError]:
    '''
    Returns the next_state (stored as ``transition``) of this
    observation, wrapping any ValueError in Err.
    '''
    try:
        next_state = self.transition
    except ValueError as error:
        return Err(ValueError(error))
    return Ok(next_state)
def set_state_(self, state: S) -> Result[None, ValueError]:
    '''
    Sets the state for this observation, wrapping any ValueError in Err.
    '''
    try:
        self.state = state
    except ValueError as error:
        return Err(ValueError(error))
    return Ok(None)
def set_next_state_(self, next_state: U) -> Result[None, ValueError]:
    '''
    Sets the next_state (stored as ``transition``) for this observation,
    wrapping any ValueError in Err.
    '''
    try:
        self.transition = next_state
    except ValueError as error:
        return Err(ValueError(error))
    return Ok(None)
def get_reward(self, **kwargs: Any) -> Result[T, ValueError]:
    '''
    Returns the reward stored in this observation, wrapping any
    ValueError in Err.
    '''
    try:
        reward = self.reward
    except ValueError as error:
        return Err(ValueError(error))
    return Ok(reward)
def set_reward_(self, reward: T) -> Result[None, ValueError]:
    '''
    Sets the reward to this value, wrapping any ValueError in Err.
    '''
    try:
        self.reward = reward
    except ValueError as error:
        return Err(ValueError(error))
    return Ok(None)
def get_action(self, **kwargs: Any) -> Result[V, ValueError]:
    '''
    Returns the action stored in this observation, wrapping any
    ValueError in Err.
    '''
    try:
        action = self.action
    except ValueError as error:
        return Err(ValueError(error))
    return Ok(action)
def get_error_dirs(error_dir, archive_dir):
    """Retrieve a list of subdirectories that meet the error dir criteria

    Error formatted directories are simply a DT_FORMAT_STR datetime

    Positional Arguments:
    error_dir -- string to the directory to look in
    archive_dir -- string path of the directory to archive succes to,
        used to construct ReadyDirs

    Returns:
    Result -- Ok([wtde.ReadyDir]), Err() if no directories found

    Fix: the "name not in error format, skipping" branch previously fell
    through (``pass``) and processed the directory anyway with
    ``play_dt=None``; directories whose names fail to parse are now
    actually skipped, as the message promises.
    """
    try:
        sub_dirs = os.listdir(error_dir)
    except FileNotFoundError:
        sub_dirs = []

    error_dirs = []
    fail_list = []
    for sd in sub_dirs:
        d = os.path.join(error_dir, sd)
        try:
            play_dt = dt.strptime(sd, DT_FORMAT_STR)
        except ValueError as e:
            if ('does not match format' in str(e)
                    or 'unconverted data remains' in str(e)):
                print('Directory {} name not in error format, skipping'.format(
                    sd))
            else:
                # Unexpected strptime failure: record it so an empty scan
                # can surface the reasons via Err(fail_list).
                fail_list.append(e)
            continue  # name did not parse; not a valid error dir
        # check to make sure we have at least 3 images in the given directory
        image_file_list = [
            f for f in os.listdir(d) if imghdr.what(os.path.join(d, f))
        ]
        if len(image_file_list) < 3:
            print('There are less than 3 images in {}. Skipping.'.format(d))
            continue
        # we have enough images in a directory we expected.
        rd = ReadyDir(files=image_file_list,
                      directory=d,
                      archive_dir=archive_dir,
                      error_dir=error_dir,
                      play_dt=play_dt,
                      remove_srcdir=True)
        error_dirs.append(rd)
    if not error_dirs:
        return Err(fail_list)
    return Ok(error_dirs)
def check(self, query):
    """
    :param query:
    """
    if query.get_type() not in {Keyword.SELECT}:
        # Only select queries need to be checked here; all others are
        # not affected by this rule.
        return Ok(True)
    earliest_date = query.get_earliest_date()
    # A sufficiently recent start date, or an explicit LIMIT, passes.
    if earliest_date >= self.min_start_date or query.limit_stmt:
        return Ok(True)
    return Err(("Querying for data before {} is prohibited. "
                "Your beginning date is {}, which is before that.").format(
                    self.min_start_date.strftime("%Y-%m-%d"), earliest_date))
def _get_per_heap_histories(
        gc: AbstractTraceGC) -> Sequence[Result[str, AbstractGCPerHeapHistory]]:
    """Return one Result per heap: Ok(history) when the history count
    matches the heap count, otherwise a repeated Err for every heap.

    Fix: the warning previously printed " It's a " twice because the
    fragment was duplicated across the string concatenation.
    """
    n = len(gc.PerHeapHistories)
    if n != gc.HeapCount:
        print(
            f"WARN: GC {gc.Number} has {gc.HeapCount} heaps, but {n} PerHeapHistories."
            f" It's a {get_gc_kind_for_abstract_trace_gc(gc).name}."
        )
        return repeat(Err("GC has wrong number of PerHeapHistories"), gc.HeapCount)
    else:
        return [Ok(h) for h in gc.PerHeapHistories]
def parse(self) -> Result[Tuple[Token, str], bool]:
    """Consume tokens from the stream until EOF, expecting each to be a
    mnemonic (``TokenType.MNE``).

    The first failing ``_expect`` result is propagated unchanged.
    NOTE(review): ``Ok()`` is returned with no payload on success, yet
    the declared return type promises a ``(Token, str)`` value — confirm
    which Result library is in use and whether bare ``Ok()`` is valid.
    """
    tk = next(self._stream)
    while tk.kind != TokenType.EOF:
        r_expect = self._expect(TokenType.MNE, tk)
        if r_expect.err():
            return r_expect
        tk = next(self._stream)
    return Ok()
def _check_page_in_titles(
        self, maybe_page: Result[str, str]) -> Result[str, str]:
    """Normalize a page name (spaces to underscores) and verify it is a
    known wikipedia title; incoming errors pass through untouched."""
    if maybe_page.is_err():
        return maybe_page
    page = maybe_page.value.replace(' ', '_')
    if page not in self._wiki_titles:
        return Err(f'page="{page}" not in wikipedia titles')
    return Ok(page)
def preprocess(self, inputs: Any) -> Result[Tensor, ValueError]:
    '''
    Preprocesses inputs using the preprocessor.

    Fix: the guard was inverted — the preprocessor was invoked only when
    ``preprocessor_exists()`` returned False, and the "None value" error
    was returned when a preprocessor actually existed. The branches are
    now swapped so the preprocessor runs when it exists.
    '''
    if self.preprocessor_exists():
        try:
            return Ok(self.preprocessor(inputs))
        except Exception as error:
            return Err(ValueError(error))
    else:
        return Err(ValueError('tried to call <CNN>.preprocessor on a None value'))
def _get_mark_times(clr: Clr, gc: AbstractTraceGC,
                    hp_i: int) -> Failable[AbstractMarkInfo]:
    """Look up mark times for heap ``hp_i``; Err when the table or the
    heap's entry is missing."""
    per_heap = gc.PerHeapMarkTimes
    if per_heap is None:
        return Err("No PerHeapMarkTimes")
    entry = clr.PythonnetUtil.TryGetValue(per_heap, hp_i)
    if entry is None:
        return Err(f"PerHeapMarkTimes contains no heap {hp_i}")
    return Ok(entry)
def __find_vin__(next_vins):
    """Scrape each candidate VIN in turn, returning Ok on the first hit.

    Fix: a ``PdfReadError`` previously returned Err immediately from
    inside the loop, so only the first VIN was ever attempted; failures
    now fall through to the next candidate, and Err is returned only
    after every VIN has missed.
    """
    for vin_number in next_vins:
        print(f"trying {vin_number}")
        try:
            next_car = scrape_vin(vin_number)
        except PdfReadError:
            print("not found")
            continue
        print(f"found {next_car.vin}")
        return Ok(next_car)
    return Err("No results for vin")
def transition(self, message: Message) -> Result[Error, bool]:
    """Check then finalize ``message``; the first Err short-circuits."""
    for step in (self.check_message, self.finalize_message):
        outcome = step(message)
        if outcome.is_err():
            return outcome
    return Ok(True)
def check(self, query):
    """
    :param query:
    """
    # Reject as soon as any sub-query lacks an aggregator.
    if any(q['aggregator'] == 'none' for q in query.get_queries()):
        return Err("No aggregator specified")
    return Ok(True)
def check(self, query):
    """
    :param query OpenTSDBQuery
    """
    stats = query.get_stats()
    if not stats:
        return Ok(True)
    duration = float(stats.get('duration', 0))
    if duration >= self.max_duration:
        return Err("Query duration exceeded: {}s Limit: {}s".format(
            duration, self.max_duration))
    return Ok(True)
def _get_join_times_for_all_heaps_worker(
    gc: AbstractTraceGC,
) -> Result[str, Sequence[JoinTimesForHeap]]:
    """Build the per-heap join-time mappings for a server GC."""
    assert gc.HeapCount > 1  # Join durations only valid for server gc
    if is_empty(gc.ServerGcHeapHistories):
        return Err("empty ServerGcHeapHistories")
    per_heap = [
        make_multi_mapping(_process_events_for_heap(heap))
        for heap in gc.ServerGcHeapHistories
    ]
    return Ok(per_heap)
def update_status() -> Result:
    """
    Update the juju status information

    :return: Result with Ok or Err.
    """
    version = get_glusterfs_version()
    application_version_set("{}".format(version))
    volume_name = config("volume_name")
    local_bricks = get_local_bricks(volume_name)
    # Both branches deliberately return Ok(()): a missing brick only
    # changes the workload state, it is not a hook failure.
    if local_bricks.is_ok():
        brick_count = len(local_bricks.value)
        status_set(workload_state="active",
                   message="Unit is ready ({} bricks)".format(brick_count))
    else:
        status_set(workload_state="blocked", message="No bricks found")
    return Ok(())
def g(elements: Sequence[TElement]) -> Failable[Sequence[TElement]]:
    """Filter ``elements`` by each element's looked-up boolean value,
    propagating the first lookup failure."""
    kept: List[TElement] = []
    for index, element in enumerate(elements):
        value = get_value_for_element(t, elements, index)
        if value.is_err():
            return Err(as_err(value))
        if check_cast(bool, value.unwrap()):
            kept.append(element)
    return Ok(kept)
def FirstToLastGCSeconds(self) -> FailableFloat:
    """Seconds from the start of the first GC to the start of the last."""
    if self.process_info is None:
        return Err("Need a trace")
    gcs = self.process_info.all_gcs_including_incomplete
    if len(gcs) < 2:
        return Err("Need at least 2 gcs")
    span_msec = gcs[-1].StartRelativeMSec - gcs[0].StartRelativeMSec
    return Ok(msec_to_seconds(span_msec))
def check_message(self, message: Message) -> Result[Error, bool]:
    """Validate ``message`` then check estimate safety; the first Err is
    returned unchanged."""
    steps = (
        lambda: MessageValidator.validate(self, message),
        lambda: CliqueOracle.check_safety(message.estimate, self, None),
    )
    for step in steps:
        outcome = step()
        if outcome.is_err():
            return outcome
    return Ok(True)
def check(self, query):
    """
    :param query:
    """
    if query.get_type() in {Keyword.SELECT, Keyword.DELETE}:
        # Only select and delete queries deal with time durations;
        # all others are not affected by this rule.
        datapoints = query.get_datapoints()
        if datapoints > self.max_datapoints:
            return Err((
                "Expecting {} datapoints from that query, which is above the threshold! "
                "Set a date range (e.g. where time > now() - 24h), "
                "increase grouping (e.g. group by time(24h) "
                "or limit the number of datapoints (e.g. limit 100)"
            ).format(datapoints))
    return Ok(True)
def transition(self, message: Message) -> Result[Error, bool]:
    """Check then justify ``message``; the first Err short-circuits."""
    # TODO: implement
    for step in (self.check_message, self.justify_message):
        outcome = step(message)
        if outcome.is_err():
            return outcome
    return Ok(True)
def FirstEventToFirstGCSeconds(self) -> FailableFloat:
    """Seconds between the first collected event and the first GC start."""
    if self.process_info is None:
        return Err("Need a trace")
    ts = self.process_info.events_time_span
    if ts is None:
        return Err("Did not specify to collect events")
    first_gc = self.process_info.all_gcs_including_incomplete[0]
    return Ok(msec_to_seconds(first_gc.StartRelativeMSec - ts.StartMSec))
def testScanDevices(self, _log, _is_block_device, _device_initialized):
    """scan_devices should wrap each dev path in an initialized,
    mounted BrickDevice when the mocked checks succeed."""
    paths = ["/dev/sda", "/dev/sdb", "/dev/sdc"]
    expected = [
        block.BrickDevice(is_block_device=True,
                          initialized=True,
                          mount_path="/mnt/{}".format(p.split("/")[-1]),
                          dev_path=p) for p in paths
    ]
    _is_block_device.return_value = Ok(True)
    _device_initialized.return_value = Ok(True)
    result = block.scan_devices(paths)
    self.assertTrue(result.is_ok())
    self.assertListEqual(expected, result.value)
def preprocess_md(state: State, file_metadata: FileMetadata,
                  md: str) -> Result[errstr, str]:
    """Expand {{ title }}, {{ desc }}, and {{ posts }} directives in a
    markdown document, recording title/desc on ``file_metadata``.

    Fix: the regex patterns are now raw strings — the non-raw ``'{{\\s...'``
    forms relied on Python leaving unknown escapes intact and emit
    SyntaxWarning on modern interpreters; the compiled patterns are
    byte-identical to before.
    """

    def title(s: str) -> Result[errstr, str]:
        # Pull the page title into metadata, strip the directive, and
        # prepend the title as an H1 heading.
        pat = r'{{\s*(?:title)\s+"(.*)"\s*}}'
        m = re.search(pat, md)
        if m is None or m.lastindex != 1:
            return Err("Invalid match on 'title'")
        file_metadata.title = m[1]
        s = s.replace(m[0], "")
        s = f"# {m[1]}\n{s}"
        return Ok(s)

    def desc(s: str) -> Result[errstr, str]:
        # Pull the page description into metadata and drop the directive.
        pat = r'{{\s*(?:desc)\s+"(.*)"\s*}}'
        m = re.search(pat, md)
        if m is None or m.lastindex != 1:
            return Err("Invalid match on 'desc'")
        file_metadata.desc = m[1]
        return Ok(s.replace(m[0], ""))

    def posts(s: str) -> Result[errstr, str]:
        # Replace the directive with a rendered index of all posts.
        pat = r"{{\s*(?:posts)\s*}}"
        m = re.search(pat, md)
        if m is None:
            return Err("Invalid match on 'posts'")
        posts_html = ""
        for p in state.posts:
            url = p.path[p.path.find("/"):].replace(".md", ".html")
            posts_html += ('<div class="post">\n'
                           '<div class="post-heading">\n'
                           f'<a href="{url}">{p.metadata.title}</a>\n'
                           "</div>\n"
                           '<div class="post-desc">\n'
                           f"{p.metadata.desc}\n"
                           "</div>\n"
                           "</div>\n")
        posts_html = f'<div class="posts">{posts_html}</div>'
        return Ok(s.replace(m[0], posts_html))

    # NOTE(review): .unwrap() here will raise on a malformed directive;
    # preserved as-is since callers may rely on the hard failure.
    if "{{ title" in md or "{{title" in md:
        md = title(md).unwrap()
    if "{{ desc" in md or "{{desc" in md:
        md = desc(md).unwrap()
    if "{{ posts" in md or "{{posts" in md:
        md = posts(md).unwrap()
    if "[toc]" in md:
        md = md.replace("[toc]", "[TOC]")
    md = md.strip()
    return Ok(md)