def test_ps_ok(self, check_output_mock):
    """Simple test with a plausible `ps` output"""
    # Instantiate `Processes` twice in a row (it is expected to be a singleton).
    first_instance = Processes()
    Processes()
    expected_processes = [
        'what', 'an', 'awesome', 'processes', 'list', 'you', 'got', 'there'
    ]
    self.assertListEqual(first_instance.get(), expected_processes)
    # Although the class has been instantiated twice, `check_output` must have run only once.
    # `unittest.mock.Mock.assert_called_once` is not available against Python < 3.6.
    self.assertEqual(check_output_mock.call_count, 1)
def test_ps_ok(self, check_output_mock):
    """Simple test with a plausible `ps` output"""
    # We'll create two `Processes` instances.
    processes_1 = Processes()
    _ = Processes()
    self.assertTupleEqual(
        processes_1.list,
        ('what', 'an', 'awesome', 'processes', 'list', 'you', 'got', 'there')
    )
    self.assertEqual(processes_1.number, 8)
    # The class has been instantiated twice, but `check_output` has been called only once.
    # NOTE: the previous `assertTrue(check_output_mock.assert_called_once)` only checked
    # the truthiness of a bound-method object (always true) and asserted nothing;
    # check the call count explicitly instead.
    self.assertEqual(check_output_mock.call_count, 1)
def __init__(self, *args, **kwargs):
    super().__init__(*args, **kwargs)
    # Look for the first known desktop environment process currently running;
    # fall back to the `XDG_CURRENT_DESKTOP` environment variable otherwise.
    running_processes = Processes().list
    self.value = next(
        (
            de_name
            for de_id, de_name in DE_DICT.items()
            if de_id in running_processes
        ),
        os.getenv('XDG_CURRENT_DESKTOP')
    )
def main():
    """Simple entry point"""
    # `Processes` is a singleton: instantiate it once here to populate its internal list.
    Processes()
    # `Configuration` is a singleton too: populate its internal object here.
    configuration = Configuration()
    output = Output()
    entries_configuration = configuration.get('entries', {})
    # Append each entry unless the user explicitly disabled it in configuration.
    for entry in Entries:
        if entries_configuration.get(entry.name, True):
            output.append(entry.name, entry.value().value)
    output.output()
def __init__(self, *args, **kwargs):
    super().__init__(*args, **kwargs)
    try:
        # Ask `wmctrl` for the window manager name directly.
        wmctrl_output = check_output(
            ['wmctrl', '-m'],
            stderr=DEVNULL, universal_newlines=True
        )
        self.value = re.search('(?<=Name: ).*', wmctrl_output).group(0)
    except (FileNotFoundError, CalledProcessError):
        # `wmctrl` is unavailable or failed: scan the processes list instead.
        running_processes = Processes().list
        for wm_id, wm_name in WM_DICT.items():
            if wm_id in running_processes:
                self.value = wm_name
                break
def __init__(self):
    # Scan the running processes for a known desktop environment identifier.
    running_processes = Processes().get()
    for process_name, de_name in DE_DICT.items():
        if process_name in running_processes:
            self.value = de_name
            return
    # Nothing matched: rely on an environment variable, defaulting to the
    # configured "not detected" string.
    self.value = os.getenv(
        'XDG_CURRENT_DESKTOP',
        Configuration().get('default_strings')['not_detected']
    )
def __init__(self, *args, **kwargs):
    super().__init__(*args, **kwargs)
    # macOS' desktop environment is called "Aqua",
    # and could not be detected from processes list.
    if platform.system() == 'Darwin':
        self.value = 'Aqua'
        return
    # On other platforms, look for a known desktop environment process.
    running_processes = Processes().list
    for de_id, de_name in DE_DICT.items():
        if de_id in running_processes:
            self.value = de_name
            return
    # Let's rely on an environment variable since no process matched.
    self.value = os.getenv('XDG_CURRENT_DESKTOP')
def main():
    """Simple entry point"""
    args = args_parsing()
    # `Processes` is a singleton, let's populate the internal list here.
    Processes()
    # `Configuration` is a singleton, let's populate the internal object here.
    configuration = Configuration(config_path=args.config_path)
    # From configuration, gather the entries user-enabled.
    # Each element is an `(entry_class, entry_name)` tuple; entries absent from the
    # configuration default to enabled.
    enabled_entries = [
        (entry.value, entry.name)
        for entry in Entries
        if configuration.get('entries', {}).get(entry.name, True)
    ]
    output = Output(
        preferred_distribution=args.distribution,
        format_to_json=args.json
    )
    # We will map this function onto our enabled entries to instantiate them.
    def _entry_instantiator(entry_tuple):
        # `entry_tuple[0]` is the entry class, `entry_tuple[1]` its name.
        return entry_tuple[0](name=entry_tuple[1])
    # Let's use a context manager stack to manage conditional use of `TheadPoolExecutor`.
    with ExitStack() as cm_stack:
        if not configuration.get('parallel_loading'):
            # Sequential loading: plain built-in `map` is enough.
            mapper = map
        else:
            # Instantiate a threads pool to load our enabled entries in parallel.
            # We use threads (and not processes) since most work done by our entries is IO-bound.
            # `max_workers` is manually computed to mimic Python 3.8+ behaviour, but for our needs.
            # See <https://github.com/python/cpython/pull/13618>.
            executor = cm_stack.enter_context(ThreadPoolExecutor(
                max_workers=min(len(enabled_entries) or 1, (os.cpu_count() or 1) + 4)
            ))
            mapper = executor.map
        # Instantiate (possibly in parallel) and register each enabled entry.
        for entry_instance in mapper(_entry_instantiator, enabled_entries):
            output.add_entry(entry_instance)
    output.output()
    # Has the screenshot flag been specified ?
    if args.screenshot is not None:
        # If so, but still _falsy_, pass `None` as no output file has been specified by the user.
        take_screenshot((args.screenshot or None))
def __init__(self):
    try:
        # Prefer asking `wmctrl` directly for the window manager name.
        wmctrl_output = check_output(
            ['wmctrl', '-m'],
            stderr=DEVNULL, universal_newlines=True
        )
        window_manager = re.search('(?<=Name: ).*', wmctrl_output).group(0)
    except (FileNotFoundError, CalledProcessError):
        # `wmctrl` is unavailable or failed: scan the processes list instead.
        running_processes = Processes().get()
        for process_name, wm_name in WM_DICT.items():
            if process_name in running_processes:
                window_manager = wm_name
                break
        else:
            # No known window manager process found either.
            window_manager = Configuration().get('default_strings')['not_detected']
    self.value = window_manager
def main():
    """Simple entry point"""
    # Only a `--version` flag is exposed on the command line.
    cli_parser = argparse.ArgumentParser(prog='archey')
    cli_parser.add_argument('-v', '--version', action='version', version=__version__)
    cli_parser.parse_args()
    # `Processes` is a singleton, let's populate the internal list here.
    Processes()
    # `Configuration` is a singleton, let's populate the internal object here.
    configuration = Configuration()
    output = Output()
    entries_configuration = configuration.get('entries', {})
    # Append each entry unless the user explicitly disabled it in configuration.
    for entry in Entries:
        if entries_configuration.get(entry.name, True):
            output.append(entry.name, entry.value().value)
    output.output()
def main():
    """Simple entry point"""
    args = args_parsing()
    # Setup logging.
    logging.basicConfig(format='%(levelname)s: %(message)s')
    # Populate our internal singletons once and for all.
    Processes()
    Environment()
    configuration = Configuration(config_path=args.config_path)
    # From configuration, gather the entries user-configured.
    available_entries = configuration.get('entries')
    if available_entries is None:
        # If none were specified, lazy-mimic a full-enabled entries list without any configuration.
        available_entries = [
            {'type': entry_name}
            for entry_name in Entries.__members__.keys()
        ]
    output = Output(
        preferred_logo_style=args.logo_style,
        preferred_distribution=args.distribution,
        format_to_json=args.json
    )
    # We will map this function onto our enabled entries to instantiate them.
    def _entry_instantiator(entry: dict) -> Optional[Entry]:
        # Based on **required** `type` field, instantiate the corresponding `Entry` object.
        try:
            return Entries[entry.pop('type')].value(
                name=entry.pop('name', None),  # `name` is fully-optional.
                options=entry  # Remaining fields should be propagated as options.
            )
        except KeyError as key_error:
            # Either `type` was missing from the entry, or it named an unknown entry.
            logging.warning(
                'One entry (misses or) uses an invalid `type` field (%s).',
                key_error
            )
            return None
    # Let's use a context manager stack to manage conditional use of `TheadPoolExecutor`.
    with ExitStack() as cm_stack:
        if not configuration.get('parallel_loading'):
            # Sequential loading: plain built-in `map` is enough.
            mapper = map
        else:
            # Instantiate a threads pool to load our enabled entries in parallel.
            # We use threads (and not processes) since most work done by our entries is IO-bound.
            # `max_workers` is manually computed to mimic Python 3.8+ behaviour, but for our needs.
            # See <https://github.com/python/cpython/pull/13618>.
            executor = cm_stack.enter_context(ThreadPoolExecutor(
                max_workers=min(len(available_entries) or 1, (os.cpu_count() or 1) + 4)
            ))
            mapper = executor.map
        # Register each successfully-instantiated entry; `None` marks a skipped/bad one.
        for entry_instance in mapper(_entry_instantiator, available_entries):
            if not entry_instance:
                continue
            output.add_entry(entry_instance)
    output.output()
    # Has the screenshot flag been specified ?
    if args.screenshot is not None:
        # If so, but still _falsy_, pass `None` as no output file has been specified by the user.
        try:
            screenshot_taken = take_screenshot((args.screenshot or None))
        except KeyboardInterrupt:
            screenshot_taken = False
            print()
        finally:
            # NOTE(review): if `take_screenshot` raised anything other than
            # `KeyboardInterrupt`, `screenshot_taken` would be unbound here and this
            # `sys.exit` would mask the original error with a `NameError` — confirm
            # `take_screenshot`'s possible exceptions.
            sys.exit((not screenshot_taken))
def test_ps_not_available(self, _):
    """Checks behavior when `ps` is not available"""
    # Without `ps`, the gathered processes tuple is expected to be empty.
    # `assertTupleEmpty` is presumably a project-provided custom assertion — TODO confirm.
    processes = Processes()
    self.assertTupleEmpty(processes.list)
def test_ps_failed(self, _):
    """Verifies that the program correctly handles first crashing `ps` call"""
    # After the first `ps` call crashes, the fallback path yields this exact list.
    expected_processes = ['sh', 'top', 'ps']
    self.assertListEqual(Processes().get(), expected_processes)