def return_version(drucker):
    """Print which app version is running and return the OK exit code.

    Dev builds report the short git commit hash of APP_ROOT; release
    builds report the tagged version from drucker.vars.APP_VERSION.
    """
    latest_commit = subprocess.getoutput(
        "cd %s && git rev-parse --short HEAD" % (drucker.vars.APP_ROOT)
    )
    if "dev" not in drucker.vars.APP_VERSION:
        tag = colorful.orange(drucker.vars.APP_VERSION)
        print("You are running tagged release " + tag)
    else:
        commitref = colorful.orange(latest_commit)
        print("You are running the dev version at commit " + commitref)
    return drucker.vars.EXITCODE_OK
def create_person(self, first_name, last_name, person_type,
                  wants_accommodation="N"):
    """Create a Staff or Fellow and register them in self.all_people.

    Args:
        first_name (str): First name; must be purely alphabetic.
        last_name (str): Last name; must be purely alphabetic.
        person_type (str): "staff" or "fellow" (case-insensitive).
        wants_accommodation (str): "Y"/"N"; only used for fellows.

    Returns:
        The created person instance, or False when name validation fails.
    """
    person_id = len(self.all_people) + 1  # Generate person_id
    if not first_name.isalpha() or not last_name.isalpha():
        print(
            colorful.orange(
                "Name should only contain alphabetic characters. "
                "Please rectify and try again"))
        return False
    # BUG FIX: `wants_accommodation` was referenced here but never
    # defined, raising NameError for every fellow. It is now a keyword
    # argument defaulting to "N", mirroring add_person's signature.
    person = Staff(first_name, last_name, person_id) \
        if person_type.lower() == "staff" else Fellow(
            first_name, last_name, wants_accommodation, person_id)
    print(
        colorful.green("{0} {1} {2} has been successfully added".format(
            person_type, first_name, last_name)))
    self.all_people.append(person)
    return person
def translate(self, locale, iso_a2):
    """Translate a country name identified by its ISO A2 code.

    Looks up the countries dataset by upper-cased ISO A2 code and, on a
    match, fetches the translation keyed by the mapped locale (or the
    language component of it). Returns None when the code is unknown or
    no translation exists for the matched country.
    """
    record = self.dataset.get(iso_a2.upper())
    if record is None:
        return None
    translation_key = "name_" + map_locale(locale)
    translation = record.get(translation_key)
    if translation is not None:
        return translation
    print(c.orange("Missing translation for {} ({}) under the {} key"
                   .format(iso_a2, locale, translation_key)))
    return None
def write_db(project_code, db_dir, dict_data):
    """Persist per-user session records into a TinyDB file.

    Each user gets its own table; a session is inserted only when no
    record with the same session_id already exists in that table. The
    database file lives in db_dir when given, otherwise in a "db"
    folder next to this module. Returns the open TinyDB instance.
    """
    db_path = db_dir if db_dir else op.join(op.dirname(__file__), "db")
    db_full_path = op.join(db_path, project_code + "_db_.json")
    db = TinyDB(db_full_path)
    query = Query()
    for user, sessions in dict_data.items():
        table = db.table(user)
        for session in sessions:
            duplicate = table.search(
                query.session_id == session["session_id"])
            if duplicate:
                print(
                    colorful.orange(" {} with session {} already in db".format(
                        user, session["session_id"])))
            else:
                table.insert(session)
                print(
                    colorful.green(" {} with session {} stored in db".format(
                        user, session["session_id"])))
    return db
def translate(self, locale, english_name):
    """Translate a populated-place (city) name given its English name.

    Looks up the populated places dataset by the English name and, on a
    match, fetches the translation keyed by the mapped locale (or the
    language component of it). Returns None when either there is no
    match or there is no translation for the matched city.
    """
    record = self.dataset.get(english_name)
    if record is None:
        return None
    translation_key = "name_" + map_locale(locale)
    translation = record.get(translation_key)
    if translation is not None:
        return translation
    print(c.orange("Missing translation for {} ({}) under the {} key"
                   .format(english_name, locale, translation_key)))
    return None
def draw(coords: List, speed=0.0) -> None:
    """Trace each word's cell path on screen with the mouse.

    ``coords`` holds (word, cells) pairs; cells are grid coordinates
    mapped to pixel positions through ``grid_to_pix``. The cursor moves
    to the first cell, presses the mouse button, drags through the
    remaining cells, then releases. Moving the cursor to a screen
    corner aborts via the pyautogui failsafe.
    """
    print(
        cf.orange(
            'Warning: getting ready to draw, do not touch cursor, Move to a corner to exit'
        ))
    time.sleep(1)
    for word, cells in coords:
        origin = grid_to_pix[cells[0]]
        print(cf.green(f'drawing {word} \r'), end='')
        pyautogui.moveTo(origin[0], origin[1], duration=speed)
        pyautogui.mouseDown()
        for cell in cells[1:]:
            pixel = grid_to_pix[cell]
            pyautogui.moveTo(pixel[0], pixel[1], duration=speed)
        pyautogui.mouseUp()
def extract_relay_locations_pot(countries):
    """Build the relay-locations .pot template from the countries list.

    Emits one POEntry per country and per city; translator comments
    carry the upper-cased country (and city) codes. The template is
    saved to LOCALE_OUT_DIR under RELAY_LOCATIONS_POT_FILENAME.
    """
    pot = POFile(encoding='utf-8', check_for_duplicates=True)
    pot.metadata = {"Content-Type": "text/plain; charset=utf-8"}
    output_path = path.join(LOCALE_OUT_DIR, RELAY_LOCATIONS_POT_FILENAME)
    print("Generating {}".format(output_path))
    for country in countries:
        country_name = country.get("name")
        if country_name is None:
            continue
        pot.append(POEntry(
            msgid=country_name,
            msgstr="",
            comment=country.get("code").upper()
        ))
        print("{} ({})".format(country_name, country.get("code")))
        cities = country.get("cities")
        if cities is None:
            continue
        for city in cities:
            city_name = city.get("name")
            if city_name is None:
                continue
            city_entry = POEntry(
                msgid=city_name,
                msgstr="",
                comment="{} {}".format(country.get("code").upper(),
                                       city.get("code").upper())
            )
            # check_for_duplicates=True makes append raise on a repeat msgid.
            try:
                pot.append(city_entry)
            except ValueError as err:
                print(c.orange("Cannot add an entry: {}".format(err)))
            print("{} ({})".format(city_name, city.get("code")))
    pot.save(output_path)
def translate_single_relay_locations(country_translator, city_translator,
                                     countries, locale):
    """
    A helper function to generate the relay-locations.po for the given locale.

    The `countries` argument is an array value that's contained within the
    "countries" key of the relay location list.
    """
    po = POFile(encoding='utf-8', check_for_duplicates=True)
    po.metadata = {"Content-Type": "text/plain; charset=utf-8"}
    locale_out_dir = path.join(LOCALE_OUT_DIR, locale)
    output_path = path.join(locale_out_dir, RELAY_LOCATIONS_PO_FILENAME)
    # Counters for the (hits, misses) translation statistics returned
    # to the caller.
    hits = 0
    misses = 0
    if not path.exists(locale_out_dir):
        os.makedirs(locale_out_dir)
    for country in countries:
        country_name = country.get("name")
        country_code = country.get("code")
        translated_country_name = country_translator.translate(locale,
                                                               country_code)
        found_country_translation = translated_country_name is not None
        # Default to empty string if no translation was found
        if found_country_translation:
            hits += 1
        else:
            translated_country_name = ""
            misses += 1
        log_message = "{} ({}) -> \"{}\"".format(country_name, country_code,
                                                 translated_country_name)
        # Green for a hit, orange for a miss.
        if found_country_translation:
            print(c.green(log_message))
        else:
            print(c.orange(log_message))
        # translate country
        entry = POEntry(
            msgid=country_name,
            msgstr=translated_country_name,
            comment=country_code.upper()
        )
        po.append(entry)
        # translate cities
        cities = country.get("cities")
        if cities is None:
            print(c.orange("Skip {} ({}) because no cities were found."
                           .format(country_name, country_code)))
            continue
        for city in cities:
            city_name = city.get("name")
            city_code = city.get("code")
            if city_name is None:
                raise ValueError("Missing the name field in city record.")
            # Make sure to append the US state back to the translated name of the city
            if country_code == "us":
                # rsplit keeps at most 3 parts; only the first two
                # ("City", "ST") are re-joined after translation.
                split = city_name.rsplit(",", 2)
                translated_name = city_translator.translate(locale,
                                                            split[0].strip())
                if translated_name is not None and len(split) > 1:
                    translated_name = "{}, {}".format(translated_name,
                                                      split[1].strip())
            else:
                translated_name = city_translator.translate(locale, city_name)
            # Default to empty string if no translation was found
            found_translation = translated_name is not None
            if found_translation:
                hits += 1
            else:
                translated_name = ""
                misses += 1
            log_message = "{} ({}) -> \"{}\"".format(city_name, city_code,
                                                     translated_name)
            if found_translation:
                print(c.green(log_message))
            else:
                print(c.orange(log_message))
            entry = POEntry(
                msgid=city_name,
                msgstr=translated_name,
                comment="{} {}".format(country_code.upper(), city_code.upper())
            )
            # check_for_duplicates=True: append raises on duplicate msgids.
            try:
                po.append(entry)
            except ValueError as err:
                print(c.orange("Cannot add an entry: {}".format(err)))
    po.save(output_path)
    return (hits, misses)
test_summary = [] failure_happened = False for test_dir in tests: parameters = ['terraform-compliance'] parameters.extend(Config.default_parameters.copy()) directory = '{}/{}'.format(Config.test_dir, test_dir) test_result = '' expected = '' if not os.path.isfile( '{}/plan.out.json'.format(directory)) or not os.path.isfile( '{}/test.feature'.format(directory)): test_result = colorful.orange('skipped') else: if os.path.isfile('{}/.failure'.format(directory)): parameters.append('--wip') if os.path.isfile('{}/.expected'.format(directory)): with open('{}/.expected'.format(directory)) as expected_file: expected = expected_file.read() parameters.extend([ '-f', '{}'.format(directory), '-p', '{}/plan.out.json'.format(directory) ]) try: print('Running {}.'.format(colorful.yellow(test_dir)))
def extract_cities_po():
    """Generate a cities PO file per locale from the Natural Earth
    populated-places shapefile.

    For every locale directory under LOCALE_DIR, looks up each city's
    locale-specific name property (falling back to the plain "name"
    attribute when missing), writes the sorted PO file into
    LOCALE_OUT_DIR/<locale>/, and finally prints a hits/misses summary
    table. Raises ValueError when neither probe key yields a name.
    """
    input_path = get_shape_path("ne_50m_populated_places")
    stats = []
    for locale in os.listdir(LOCALE_DIR):
        locale_dir = path.join(LOCALE_DIR, locale)
        locale_out_dir = path.join(LOCALE_OUT_DIR, locale)
        if os.path.isdir(locale_dir):
            po = POFile(encoding='utf-8', check_for_duplicates=True)
            po.metadata = {"Content-Type": "text/plain; charset=utf-8"}
            output_path = path.join(locale_out_dir, CITIES_PO_FILENAME)
            # translation hit/miss counters for the summary table
            hits = 0
            misses = 0
            if not path.exists(locale_out_dir):
                os.makedirs(locale_out_dir)
            print("Generating {}".format(output_path))
            with fiona.open(input_path) as source:
                for feat in source:
                    props = lower_dict_keys(feat["properties"])
                    # Only keep cities above the population threshold.
                    if props["pop_max"] >= POPULATION_MAX_FILTER:
                        name_key = "name_" + map_locale(locale)
                        name_fallback = "name"
                        if props.get(name_key) is not None:
                            translated_name = props.get(name_key)
                            hits += 1
                        elif props.get(name_fallback) is not None:
                            # Fall back to the untranslated name and count
                            # the miss.
                            translated_name = props.get(name_fallback)
                            print(
                                c.orange("Missing translation for {}".format(
                                    translated_name)))
                            misses += 1
                        else:
                            raise ValueError(
                                "Cannot find the translation for {}. Probe keys: {}"
                                .format(locale, (name_key, name_fallback)))
                        entry = POEntry(msgid=props.get("name"),
                                        msgstr=translated_name)
                        # check_for_duplicates=True: append raises on
                        # duplicate msgids.
                        try:
                            po.append(entry)
                        except ValueError as err:
                            print(
                                c.orange(
                                    "Cannot add an entry: {}".format(err)))
            sort_pofile_entries(po)
            po.save(output_path)
            print(
                c.green("Extracted {} cities to {}".format(
                    len(po), output_path)))
            stats.append((locale, hits, misses))
    print_stats_table("Cities translations", stats)
def _get_dataframe_or_path(
    self,
    id: Optional[str],
    name: Optional[str],
    version: Optional[DatasetVersion],
    file: Optional[str],
    get_dataframe: bool,
) -> Optional[Union[str, pd.DataFrame]]:
    """Fetch a datafile, trying Figshare, offline cache, local cache,
    then the Taiga API, in that order.

    Returns a pandas DataFrame when ``get_dataframe`` is True, a path
    to the raw file when it is False, or None on any validation or
    download error (errors are printed in red, warnings in orange).
    """
    # Figshare-mapped clients bypass Taiga entirely.
    if self.figshare_map is not None:
        try:
            return self._get_dataframe_or_path_from_figshare(id, get_dataframe)
        except ValueError as e:
            print(cf.red(str(e)))
            return None

    self._set_token_and_initialized_api()

    # With no API connection, fall back to whatever is cached locally.
    if not self.api.is_connected():
        return self._get_dataframe_or_path_offline(
            id, name, version, file, get_dataframe
        )

    # Validate inputs
    try:
        datafile_metadata = self._validate_file_for_download(
            id, name, str(version) if version is not None else version, file
        )
        version = datafile_metadata.dataset_version
    # NOTE(review): including Exception makes this effectively a
    # catch-all; the specific types are documentation only — confirm
    # whether narrowing was intended.
    except (TaigaDeletedVersionException, ValueError, Exception) as e:
        print(cf.red(str(e)))
        return None

    datafile_format = datafile_metadata.datafile_format
    # Raw files cannot be parsed into a DataFrame.
    if get_dataframe and datafile_format == DataFileFormat.Raw:
        print(
            cf.red(
                "The file is a Raw one, please use instead `download_to_cache` with the same parameters"
            )
        )
        return None

    # Check the cache
    if id is not None:
        query = id
    else:
        query = format_datafile_id(name, version, file)

    full_taiga_id = format_datafile_id_from_datafile_metadata(datafile_metadata)
    if datafile_metadata.underlying_file_id is not None:
        # Virtual datafile: resolve to the underlying (real) file and
        # warn when its dataset version is not approved.
        full_taiga_id = datafile_metadata.underlying_file_id
        underlying_datafile_metadata = self.api.get_datafile_metadata(
            datafile_metadata.underlying_file_id, None, None, None
        )
        if underlying_datafile_metadata.state != DatasetVersionState.approved:
            print(
                cf.orange(
                    f"The underlying datafile for the file you are trying to download is from a {underlying_datafile_metadata.state.value} dataset version."
                )
            )

    get_from_cache = (
        self.cache.get_entry if get_dataframe else self.cache.get_raw_path
    )
    try:
        df_or_path = get_from_cache(query, full_taiga_id)
        if df_or_path is not None:
            return df_or_path
    except TaigaCacheFileCorrupted as e:
        # Corrupted cache entry: warn and fall through to re-download.
        print(cf.orange(str(e)))

    # Download from Taiga
    try:
        return self._download_file_and_save_to_cache(
            query, full_taiga_id, datafile_metadata, get_dataframe
        )
    except (Taiga404Exception, ValueError) as e:
        print(cf.red(str(e)))
        return None
def write_summary(features):
    """Write the end report after all Feature Files are ran"""
    # One line per aggregation level, e.g. "3 Features (2 passed, 1 failed)".
    feature_states = [f.state for f in features]
    features_line = "{} Feature{} ({})".format(
        len(feature_states),
        "s" if len(feature_states) != 1 else "",
        ", ".join("{} {}".format(v, k.name.lower())
                  for k, v in Counter(feature_states).items()),
    )
    scenarios = []
    rules_scenarios = (rule.scenarios for feature in features
                      for rule in feature.rules)
    for scenario in itertools.chain(*rules_scenarios):
        # Scenario Outlines are counted per generated Example.
        if hasattr(scenario, "examples"):
            scenarios.extend(scenario.examples)
        else:
            scenarios.append(scenario)
    scenarios_line = "{} Scenario{} ({})".format(
        len(scenarios),
        "s" if len(scenarios) != 1 else "",
        ", ".join("{} {}".format(v, k.name.lower())
                  for k, v in Counter(s.state
                                      for s in scenarios).items()),
    )
    # NOTE(review): the comprehension deliberately rebinds `s` in both
    # loops; it flattens all Steps of all Scenarios.
    steps = [s for s in scenarios for s in s.steps]
    steps_line = "{} Step{} ({})".format(
        len(steps),
        "s" if len(steps) != 1 else "",
        ", ".join("{} {}".format(v, k.name.lower())
                  for k, v in Counter(s.state for s in steps).items()),
    )
    print(features_line, flush=True)
    print(scenarios_line, flush=True)
    print(steps_line, flush=True)
    # remind about pending Steps
    pending_steps = [s for s in steps if s.state is State.PENDING]
    if pending_steps:
        # Deduplicate: several pending Steps may share one implementation.
        pending_step_implementations = {s.step_impl for s in pending_steps}
        print(
            cf.orange(
                "You have {} pending Step Implementation{} affecting {} Step{}:"
                .format(
                    cf.bold_orange(len(pending_step_implementations)),
                    "s" if len(pending_step_implementations) != 1 else "",
                    cf.bold_orange(len(pending_steps)),
                    "s" if len(pending_steps) != 1 else "",
                )))
        for pending_step_implementation in pending_step_implementations:
            # Point the user at the pattern and source location of each
            # pending implementation.
            print(
                cf.orange("* '{} {}' @ {}:{}".format(
                    cf.bold_orange(pending_step_implementation.keyword),
                    cf.bold_orange(pending_step_implementation.pattern),
                    cf.bold_orange(
                        pending_step_implementation.func.__code__.co_filename),
                    cf.bold_orange(pending_step_implementation.func.__code__.
                                   co_firstlineno),
                )))
        print(
            cf.orange(
                "Note: This may be the reason for potentially failed Steps!"))
    # Aggregate wall-clock duration across all Features.
    total_duration = sum((f.duration() for f in features), timedelta())
    timing_information = cf.deepSkyBlue3(
        "Run {marker} finished within {duration} seconds".format(
            marker=cf.bold_deepSkyBlue3(world.config.marker),
            duration=cf.bold_deepSkyBlue3(total_duration.total_seconds()),
        ))
    print(timing_information, flush=True)
def add_person(self, first_name, last_name, person_type,
               wants_accommodation="N"):
    """
    Used to create a new Person instance.

    Args:
        first_name (str): First name.
        last_name (str): Last name.
        person_type (str): Type of person ie Fellow / Staff.
        wants_accommodation : Indicates wether someone wants accomodation.
            Can be N for No or Y for Yes

    Returns:
        dict: The person's full name and the rooms allocated to them,
        or None when name validation fails.
    """
    rooms = []
    person_id = len(self.people) + 1  # Generate person_id
    if not first_name.isalpha() or not last_name.isalpha():
        print(
            colorful.orange(
                "Name should only contain alphabetic characters. "
                "Please rectify and try again"))
        return
    person = Staff(first_name, last_name, person_id) \
        if person_type.lower() == "staff" else Fellow(
            first_name, last_name, wants_accommodation, person_id)
    print(
        colorful.green("{0} {1} {2} has been successfully added".format(
            person_type, first_name, last_name)))
    self.people.append(person)
    # Assign office to person
    office_rooms = [
        room for room in self.rooms
        if room._type.lower() == "office" and not room.fully_occupied
    ]
    if office_rooms:
        chosen_room = random.choice(office_rooms)
        add_person_to_room(person, chosen_room)
        person.has_office = True
        rooms.append({"office": chosen_room.name})
        print(
            colorful.green("{0} has been allocated the office {1}".format(
                first_name, chosen_room.name)))
    else:
        print(
            colorful.red(
                "Sorry, No more office rooms for {0} to occupy.".format(
                    first_name)))
    # BUG FIX: the comparisons below used `is`, which tests object
    # identity rather than equality; matching depended on CPython
    # string interning. Replaced with `==`.
    if wants_accommodation == "Y" and person_type.lower() == "staff":
        print(
            colorful.red(
                "Sorry, No living space has been allocated to you as these"
                " are only meant for fellows."))
    # Assign person living_space
    if wants_accommodation == "Y" and person_type.lower() == "fellow":
        accommodation_rooms = [
            room for room in self.rooms if room._type == "living_space"
        ]
        for living_room in accommodation_rooms:
            if not living_room.fully_occupied:
                add_person_to_room(person, living_room)
                person.has_living_space = True
                rooms.append({"living_space": living_room.name})
                print(
                    colorful.green(
                        "{0} has been allocated the living space {1}".
                        format(first_name, living_room.name)))
                break
        if not person.has_living_space:
            # BUG FIX: added the missing space between "rooms" and "for"
            # in the concatenated message.
            print(
                colorful.red(
                    "Sorry, there are no more free accommodation rooms"
                    " for {0} to occupy.".format(first_name)))
    person.rooms_occupied = rooms
    return {
        "Person": person.first_name + " " + person.last_name,
        "Rooms": rooms
    }
def extract_countries_po():
    """Generate a countries.po per locale from the Natural Earth
    admin-0 countries shapefile (Python 2 script).

    For each locale directory under LOCALE_DIR, resolves the translated
    country name by probing locale-derived property keys (falling back
    to the plain "name"), and also adds entries for the formal country
    name plus the "USA"/"UK" shorthand msgids. Raises ValueError when
    no probe key yields a name.
    """
    input_path = get_shape_path("ne_50m_admin_0_countries")
    for locale in os.listdir(LOCALE_DIR):
        locale_dir = path.join(LOCALE_DIR, locale)
        locale_out_dir = path.join(LOCALE_OUT_DIR, locale)
        if os.path.isdir(locale_dir):
            with fiona.open(input_path) as source:
                po = POFile(encoding='UTF-8')
                po.metadata = {"Content-Type": "text/plain; charset=utf-8"}
                output_path = path.join(locale_out_dir, "countries.po")
                if not path.exists(locale_out_dir):
                    os.makedirs(locale_out_dir)
                print "Generating {}/countries.po".format(locale)
                for feat in source:
                    props = lower_dict_keys(feat["properties"])
                    # Probe keys, most specific first: language-only,
                    # full locale identifier, then the untranslated name.
                    name_key = "_".join(("name", get_locale_language(locale)))
                    name_alt_key = "_".join(("name", convert_locale_ident(locale)))
                    name_fallback = "name"
                    country_name = props.get("name")
                    formal_country_name = props.get("formal_en", country_name)
                    if props.get(name_key) is not None:
                        translated_name = props.get(name_key)
                    elif props.get(name_alt_key) is not None:
                        translated_name = props.get(name_alt_key)
                    elif props.get(name_fallback) is not None:
                        # Fall back to the untranslated name, warning loudly.
                        translated_name = props.get(name_fallback)
                        print c.orange(u" Missing translation for {}".format(translated_name))
                    else:
                        raise ValueError(
                            "Cannot find the translation for {}. Probe keys: {}"
                            .format(locale, (name_key, name_alt_key))
                        )
                    entry = POEntry(
                        msgid=country_name,
                        msgstr=translated_name
                    )
                    po.append(entry)
                    # add additional record for the formal country name.
                    if country_name != formal_country_name and formal_country_name is not None:
                        entry = POEntry(
                            msgid=formal_country_name,
                            msgstr=translated_name
                        )
                        po.append(entry)
                    # exception for the US
                    if props.get("iso_a3") == "USA":
                        entry = POEntry(
                            msgid="USA",
                            msgstr=translated_name
                        )
                        po.append(entry)
                    # exception for the UK
                    if props.get("iso_a3") == "GBR":
                        entry = POEntry(
                            msgid="UK",
                            msgstr=translated_name
                        )
                        po.append(entry)
                po.save(output_path)
                print c.green("Extracted {} countries for {} to {}".format(len(po), locale, output_path))
from collections import namedtuple

import colorful

# All colour names below come from the solarized palette.
colorful.use_style("solarized")

# The board is a SIZE x SIZE grid.
SIZE = 9
# Printed width of one board row (three characters per cell plus borders).
LINE_LENGTH = (SIZE * 3) + 3

# Cell state markers used on the board.
EMPTY = "-"
SHIP = "S"
HIT = "H"
MISS = "M"
SUNK = "Sunk"

# Colour to render each cell state; "recent" highlights the latest move.
BOARD_COLOURS = {
    SHIP: colorful.blue,
    HIT: colorful.red,
    MISS: colorful.violet,
    EMPTY: colorful.base02,
    "recent": colorful.yellow,
}

# NOTE(review): "SEPERATOR" is a misspelling of "SEPARATOR"; kept as-is
# because callers elsewhere may reference these names.
SEPERATOR = " "
BOARD_SEPERATOR = colorful.cyan(" --|-- ")
PROMPT = colorful.orange(">>> ")

# Lengths of the ships each player starts with.
START_SHIPS = [7, 5, 4, 4, 3, 2]

# A grid coordinate.
Point = namedtuple("Point", "x y")
# A ship: origin Point, cell length, and orientation.
Ship = namedtuple("Ship", "position length orientation")
def run_tests(tests):
    """Run every terraform-compliance integration test directory.

    For each directory: skips when no plan.out.json exists, builds the
    CLI parameters (honouring the .failure, .expected, .unexpected and
    .no_early_exit marker files), runs terraform-compliance, and checks
    the output against the expected/unexpected regex lists.

    Returns:
        (failure_happened, test_summary): whether any test failed or
        errored, and one formatted summary line per test.
    """
    test_summary = []
    failure_happened = False
    for test_dir in tests:
        parameters = ['terraform-compliance', '--no-ansi']

        # Ignore if there are any .terraform folders in this level. They can build up when writing tests.
        if '.terraform' in test_dir:
            continue

        feature_directory = '{}/../..'.format(test_dir)
        test_result = ''
        expected = ''
        unexpected = ''
        # BUG FIX: track failure per test; the original checked the
        # suite-wide failure_happened flag, so every test after the
        # first failure got an empty result string.
        test_failed = False

        if not os.path.isfile('{}/plan.out.json'.format(test_dir)):
            test_result = colorful.orange('skipped')
        else:
            if os.path.isfile('{}/.failure'.format(test_dir)):
                parameters.append('--wip')

            if os.path.isfile('{}/.expected'.format(test_dir)):
                with open('{}/.expected'.format(test_dir)) as expected_file:
                    expected = expected_file.read().split('\n')

            if os.path.isfile('{}/.unexpected'.format(test_dir)):
                with open(
                        '{}/.unexpected'.format(test_dir)) as unexpected_file:
                    unexpected = unexpected_file.read().split('\n')

            if not os.path.isfile('{}/.no_early_exit'.format(test_dir)):
                parameters.append('-q')

            parameters.extend([
                '-f', '{}'.format(feature_directory),
                '-p', '{}/plan.out.json'.format(test_dir)
            ])

            try:
                print('Running {}.'.format(colorful.yellow(test_dir)))
                # TODO: Add multithreading here if we have more than 50+ integration tests ?
                test_process = subprocess.run(
                    parameters,
                    check=True,
                    stdout=subprocess.PIPE,
                    universal_newlines=True,
                )

                if os.environ.get('DEBUG'):
                    print('Output: {}'.format(
                        colorful.grey(test_process.stdout)))

                if test_process.returncode == 0:
                    # Every .expected pattern must appear in the output.
                    if expected:
                        expected_failures = [
                            exp for exp in expected
                            if not re.findall(exp, str(test_process.stdout))
                        ]
                        if expected_failures:
                            print('\nOutput: {}'.format(test_process.stdout))
                            print('Can not find ;')
                            for failure in expected_failures:
                                print('\t{}'.format(colorful.yellow(failure)))
                            print('in the test output.\n')
                            test_result = colorful.red('failed')
                            test_failed = True

                    # No .unexpected pattern may appear in the output.
                    if unexpected:
                        unexpected_failures = [
                            unexp for unexp in unexpected
                            if re.findall(unexp, str(test_process.stdout))
                        ]
                        if unexpected_failures:
                            print('\nOutput: {}'.format(test_process.stdout))
                            print('Found;')
                            # BUG FIX: this loop iterated expected_failures,
                            # printing the wrong patterns (and raising
                            # NameError when no .expected file existed).
                            for failure in unexpected_failures:
                                print('\t{}'.format(colorful.yellow(failure)))
                            print('in the test output. This was unexpected.\n')
                            test_result = colorful.red('failed')
                            test_failed = True

                    if not test_failed:
                        test_result = colorful.green('passed')
                else:
                    print('Output: {}'.format(test_process.stdout))
                    test_result = colorful.red('failed')
                    test_failed = True
            except subprocess.CalledProcessError as e:
                test_failed = True
                # check=True raised: exit code 1 is a normal test failure,
                # anything else is an error in the tool itself.
                if e.returncode != 1:
                    test_result = colorful.orange('errored')
                else:
                    test_result = colorful.red('failed')
                    print(
                        'Expected a different return code. Received {}'.format(
                            colorful.yellow(e.returncode)))
                print('Output: {}'.format(e.stdout))

        if test_failed:
            failure_happened = True

        test_summary.append('{:.<70s}{:.>10s}'.format(test_dir, test_result))

    return failure_happened, test_summary
def _validate_file_for_download(
    self,
    id_or_permaname: Optional[str],
    dataset_name: Optional[str],
    dataset_version: Optional[str],
    datafile_name: Optional[str],
) -> DataFileMetadata:
    """Resolve and sanity-check a datafile reference before download.

    Either ``id_or_permaname`` or ``dataset_name`` must be given; when
    only a dataset name is provided, the latest valid version is chosen
    (with an orange warning). Warns on deprecated versions; for deleted
    versions the local cache is purged and TaigaDeletedVersionException
    is raised. Raises ValueError when no matching data exists.
    """
    if id_or_permaname is None and dataset_name is None:
        # TODO standardize exceptions
        raise ValueError("id or name must be specified")
    elif (
        id_or_permaname is None
        and dataset_name is not None
        and dataset_version is None
    ):
        # No version given: look the dataset up and pick the latest
        # valid version.
        dataset_metadata: DatasetMetadataDict = (
            self.api.get_dataset_version_metadata(dataset_name, None)
        )
        dataset_version = get_latest_valid_version_from_metadata(dataset_metadata)
        print(
            cf.orange(
                "No dataset version provided. Using version {}.".format(
                    dataset_version
                )
            )
        )

    metadata = self.api.get_datafile_metadata(
        id_or_permaname, dataset_name, dataset_version, datafile_name
    )

    if metadata is None:
        raise ValueError(
            "No data for the given parameters. Please check your inputs are correct."
        )

    dataset_version_id = metadata.dataset_version_id
    dataset_permaname = metadata.dataset_permaname
    dataset_version = metadata.dataset_version
    datafile_name = metadata.datafile_name
    data_state = metadata.state
    data_reason_state = metadata.reason_state

    # These fields are required downstream; fail loudly if the server
    # returned an incomplete record.
    assert dataset_version_id is not None
    assert dataset_permaname is not None
    assert dataset_version is not None
    assert datafile_name is not None

    if data_state == DatasetVersionState.deprecated.value:
        print(
            cf.orange(
                "WARNING: This version is deprecated. Please use with caution, and see the reason below:"
            )
        )
        print(cf.orange("\t{}".format(data_reason_state)))
    elif data_state == DatasetVersionState.deleted.value:
        # Deleted versions are also evicted from the local cache before
        # raising.
        self.cache.remove_all_from_cache(
            "{}.{}/".format(dataset_permaname, dataset_version)
        )
        raise TaigaDeletedVersionException(
            "{} version {} is deleted. The data is not available anymore. Contact the maintainer of the dataset.".format(
                dataset_permaname, dataset_version
            )
        )

    return metadata
def translate_relay_locations(place_translator, countries, locale):
    """Generate relay-locations.po for one locale (Python 2 script).

    Translates each city name via place_translator (re-attaching the US
    state suffix for "us" entries) and writes the PO file under
    LOCALE_OUT_DIR/<locale>/. Countries without a "cities" key are
    skipped with a warning. Returns a (hits, misses) tuple of
    translation statistics.
    """
    po = POFile(encoding='UTF-8')
    po.metadata = {"Content-Type": "text/plain; charset=utf-8"}
    locale_out_dir = path.join(LOCALE_OUT_DIR, locale)
    output_path = path.join(locale_out_dir, "relay-locations.po")
    # Translation hit/miss counters returned to the caller.
    hits = 0
    misses = 0
    if not path.exists(locale_out_dir):
        os.makedirs(locale_out_dir)
    for country in countries:
        country_name = country.get("name")
        country_code = country.get("code")
        cities = country.get("cities")
        if cities is None:
            print c.orange(u"Skip {} ({}) because no cities were found.".format(
                country_name, country_code))
            continue
        for city in cities:
            city_name = city.get("name")
            city_code = city.get("code")
            if city_name is None:
                raise ValueError("Missing the name field in city record.")
            # Make sure to append the US state back to the translated name of the city
            if country_code == "us":
                # rsplit keeps at most three parts; only the first two
                # ("City", "ST") are re-joined after translation.
                split = city_name.rsplit(",", 2)
                translated_name = place_translator.translate(locale, split[0].strip())
                if translated_name is not None and len(split) > 1:
                    translated_name = u"{}, {}".format(translated_name, split[1].strip())
            else:
                translated_name = place_translator.translate(locale, city_name)
            # Default to empty string if no translation was found
            found_translation = translated_name is not None
            if found_translation:
                hits += 1
            else:
                translated_name = ""
                misses += 1
            # Encoded for printing to a byte-oriented stdout (Python 2).
            log_message = u" {} ({}) -> \"{}\"".format(
                city_name, city_code, translated_name).encode('utf-8')
            if found_translation:
                print c.green(log_message)
            else:
                print c.orange(log_message)
            entry = POEntry(
                msgid=city_name,
                msgstr=translated_name,
                comment=u"{} {}".format(country.get("code").upper(),
                                        city.get("code").upper())
            )
            po.append(entry)
    po.save(output_path)
    return (hits, misses)
print(cf.red('red' + cf.white(' white ', nested=True) + 'red')) # combine styles with strings print(cf.bold & cf.red | 'Hello World') # use true colors cf.use_true_colors() # extend default color palette cf.update_palette({'mint': '#c5e8c8'}) print(cf.mint_on_snow('Wow, this is actually mint')) # choose a predefined style cf.use_style('solarized') # print the official solarized colors print(cf.yellow('yellow'), cf.orange('orange'), cf.red('red'), cf.magenta('magenta'), cf.violet('violet'), cf.blue('blue'), cf.cyan('cyan'), cf.green('green')) # directly print with colors cf.print('{c.bold_blue}Hello World{c.reset}') # choose specific color mode for one block with cf.with_8_ansi_colors() as c: print(c.bold_green('colorful is awesome!')) # create and choose your own color palette MY_COMPANY_PALETTE = { 'companyOrange': '#f4b942', 'companyBaige': '#e8dcc5'
def assert_step_match(
    step: Step,
    expected_step_func: str,
    expected_step_arguments: List[Dict[str, Any]],
    step_registry: StepRegistry,
):
    """Verify that ``step`` resolves to the expected Step Implementation.

    Matches the Step against the registry, then checks that the matched
    function — and, when given, its evaluated arguments — are the
    expected ones. Prints a colored pass/fail marker and returns True
    on a full match, False otherwise.
    """
    header = "{} STEP '{}' SHOULD MATCH {}".format(
        cf.orange(">>"),
        cf.deepSkyBlue3("{} {}".format(step.keyword, step.text)),
        cf.deepSkyBlue3(expected_step_func),
    )
    print(header, end=" ", flush=True)

    # Resolve the Step text against the registry.
    try:
        matcher.match_step(step, step_registry)
    except StepImplementationNotFoundError:
        print_failure(None, ["Expected Step Text didn't match any Step Implementation"])
        return False

    # The matched implementation must be the expected function.
    impl_func = step.step_impl.func
    if impl_func.__name__ != expected_step_func:
        print_failure(
            impl_func,
            [
                "Expected Step Text matched {} instead of {}".format(
                    impl_func.__name__, expected_step_func
                )
            ],
        )
        return False

    if expected_step_arguments:
        # Merge the Step's positional and keyword arguments into one dict.
        args, kwargs = step.step_impl_match.evaluate()
        actual_arguments = utils.get_func_pos_args_as_kwargs(impl_func, args)
        actual_arguments.update(kwargs)

        # Flatten the list of single-item dicts into one ordered dict:
        # [{1: 2}, {3: 4}] -> {1: 2, 3: 4}
        # NOTE(TF) for Python 3.5 test reproducibility we need an OrderedDict -.^
        flattened_expected = OrderedDict(
            pair
            for pairs in expected_step_arguments
            for pair in pairs.items()
        )

        mismatches = assert_step_arguments(actual_arguments, flattened_expected)
        if mismatches:
            print_failure(impl_func, mismatches)
            return False

    print(cf.bold_forestGreen("✔"))
    return True