def test_add_homes():
    """ Tests map.add_homes """
    this_map = deepcopy(Map())
    this_map.add_homes('FRANCE', 'BRE MAR PAR'.split(), reinit=1)
    assert this_map.homes['FRANCE'] == ['BRE', 'MAR', 'PAR']
    this_map.add_homes('FRANCE', [], reinit=1)
    assert this_map.homes['FRANCE'] == []
def test_is_valid_unit():
    """ Tests map.is_valid_unit """
    # ADR = WATER
    # ALB = COAST
    # BUL/EC = COAST
    # BUR = LAND
    # SWI = SHUT
    this_map = deepcopy(Map())
    assert this_map.is_valid_unit('A ADR', no_coast_ok=0, shut_ok=0) == 0
    assert this_map.is_valid_unit('A ALB', no_coast_ok=0, shut_ok=0) == 1
    assert this_map.is_valid_unit('A BUL', no_coast_ok=0, shut_ok=0) == 1
    assert this_map.is_valid_unit('A BUL/EC', no_coast_ok=0, shut_ok=0) == 0
    assert this_map.is_valid_unit('A BUR', no_coast_ok=0, shut_ok=0) == 1
    assert this_map.is_valid_unit('A SWI', no_coast_ok=0, shut_ok=0) == 0
    assert this_map.is_valid_unit('F ADR', no_coast_ok=0, shut_ok=0) == 1
    assert this_map.is_valid_unit('F ALB', no_coast_ok=0, shut_ok=0) == 1
    assert this_map.is_valid_unit('F BUL', no_coast_ok=0, shut_ok=0) == 0
    assert this_map.is_valid_unit('F BUL/EC', no_coast_ok=0, shut_ok=0) == 1
    assert this_map.is_valid_unit('F BUR', no_coast_ok=0, shut_ok=0) == 0
    assert this_map.is_valid_unit('F SWI', no_coast_ok=0, shut_ok=0) == 0
    assert this_map.is_valid_unit('F ADR', no_coast_ok=1, shut_ok=0) == 1
    assert this_map.is_valid_unit('F ALB', no_coast_ok=1, shut_ok=0) == 1
    assert this_map.is_valid_unit('F BUL', no_coast_ok=1, shut_ok=0) == 1
    assert this_map.is_valid_unit('F BUL/EC', no_coast_ok=1, shut_ok=0) == 1
    assert this_map.is_valid_unit('F BUR', no_coast_ok=1, shut_ok=0) == 0
    assert this_map.is_valid_unit('F SWI', no_coast_ok=1, shut_ok=0) == 0
    assert this_map.is_valid_unit('? ADR', no_coast_ok=0, shut_ok=0) == 1
    assert this_map.is_valid_unit('? ALB', no_coast_ok=0, shut_ok=0) == 1
    assert this_map.is_valid_unit('? BUL', no_coast_ok=0, shut_ok=0) == 1
    assert this_map.is_valid_unit('? BUL/EC', no_coast_ok=0, shut_ok=0) == 1
    assert this_map.is_valid_unit('? BUR', no_coast_ok=0, shut_ok=0) == 1
    assert this_map.is_valid_unit('? SWI', no_coast_ok=0, shut_ok=0) == 0
    assert this_map.is_valid_unit('A SWI', no_coast_ok=0, shut_ok=1) == 1
    assert this_map.is_valid_unit('F SWI', no_coast_ok=0, shut_ok=1) == 1
    assert this_map.is_valid_unit('? SWI', no_coast_ok=0, shut_ok=1) == 1
def test_area_type():
    """ Tests map.area_type """
    this_map = deepcopy(Map())
    assert this_map.area_type('ADR') == 'WATER'
    assert this_map.area_type('ALB') == 'COAST'
    assert this_map.area_type('BUL/EC') == 'COAST'
    assert this_map.area_type('BUR') == 'LAND'
    assert this_map.area_type('SWI') == 'SHUT'
def test_map_with_full_path():
    """ Tests for map creation from a full file path """
    maps = glob.glob(os.path.join(MODULE_PATH, 'maps', '*.map'))
    assert maps, 'Expected maps to be found.'
    for current_map in maps:
        this_map = Map(current_map)
        assert this_map.error == [], 'Map %s should have no errors' % current_map
        del this_map
def test_map_creation():
    """ Tests for map creation """
    maps = glob.glob(os.path.join(MODULE_PATH, 'maps', '*.map'))
    assert maps, 'Expected maps to be found.'
    for current_map in maps:
        map_name = current_map[current_map.rfind('/') + 1:].replace('.map', '')
        this_map = Map(map_name)
        assert this_map.error == [], 'Map %s should have no errors' % map_name
        del this_map
def test_abut_list():
    """ Tests map.abut_list """
    this_map = deepcopy(Map())
    this_map.loc_abut['---'] = ['ABC', 'DEF', 'GHI']
    this_map.loc_abut['aaa'] = ['LOW', 'HIG', 'MAY']
    assert this_map.abut_list('---') == ['ABC', 'DEF', 'GHI']
    assert this_map.abut_list('AAA') == ['LOW', 'HIG', 'MAY']
    assert this_map.abut_list('LVP') == ['CLY', 'edi', 'IRI', 'NAO', 'WAL', 'yor']
def test_default_coast():
    """ Tests map.default_coast """
    this_map = deepcopy(Map())
    assert this_map.default_coast(['F', 'GRE', '-', 'BUL']) == ['F', 'GRE', '-', 'BUL/SC']
    assert this_map.default_coast(['F', 'MAO', '-', 'SPA']) == ['F', 'MAO', '-', 'SPA']
    assert this_map.default_coast(['F', 'FIN', '-', 'STP']) == ['F', 'FIN', '-', 'STP/SC']
    assert this_map.default_coast(['F', 'NAO', '-', 'MAO']) == ['F', 'NAO', '-', 'MAO']
def test_find_previous_phase():
    """ Tests map.find_previous_phase """
    this_map = deepcopy(Map())
    assert this_map.find_previous_phase('FORMING') == 'FORMING'
    assert this_map.find_previous_phase('COMPLETED') == 'COMPLETED'
    assert this_map.find_previous_phase('SPRING 1902 MOVEMENT') == 'WINTER 1901 ADJUSTMENTS'
    assert this_map.find_previous_phase('SPRING 1902 MOVEMENT', phase_type='R') == 'FALL 1901 RETREATS'
    assert this_map.find_previous_phase('SPRING 1903 MOVEMENT', phase_type='R', skip=1) == 'SPRING 1902 RETREATS'
def test_abuts():
    """ Tests map.abuts """
    this_map = deepcopy(Map())
    assert this_map.abuts('A', 'POR', 'S', 'SPA/NC') == 1
    assert this_map.abuts('A', 'POR', 'C', 'SPA/NC') == 0
    assert this_map.abuts('A', 'MUN', 'S', 'SWI') == 0
    assert this_map.abuts('?', 'YOR', 'S', 'LVP') == 1
    assert this_map.abuts('F', 'YOR', 'S', 'LVP') == 0
    assert this_map.abuts('A', 'YOR', 'S', 'LVP') == 1
    assert this_map.abuts('F', 'BOT', 'S', 'STP') == 1
    assert this_map.abuts('F', 'BOT', 'S', 'MOS') == 0
    assert this_map.abuts('F', 'VEN', 'S', 'TUS') == 0
    assert this_map.abuts('A', 'POR', 'C', 'MAO') == 1
def add_to_cache(map_name, max_convoy_length=MAX_CONVOY_LENGTH):
    """ Lazily generates convoy paths for a map and adds them to the disk cache

        :param map_name: The name of the map
        :param max_convoy_length: The maximum convoy length permitted
        :return: The convoy_paths for that map
    """
    convoy_paths = {'__version__': __VERSION__}             # Uses hash as key
    external_convoy_paths = {'__version__': __VERSION__}    # Uses hash as key

    # Loading from internal cache first
    if os.path.exists(INTERNAL_CACHE_PATH):
        try:
            cache_data = pickle.load(open(INTERNAL_CACHE_PATH, 'rb'))
            if cache_data.get('__version__', '') == __VERSION__:
                convoy_paths.update(cache_data)
        except (pickle.UnpicklingError, EOFError):
            pass

    # Loading external cache
    if os.path.exists(EXTERNAL_CACHE_PATH):
        try:
            cache_data = pickle.load(open(EXTERNAL_CACHE_PATH, 'rb'))
            if cache_data.get('__version__', '') != __VERSION__:
                print('Upgrading cache from "%s" to "%s"'
                      % (cache_data.get('__version__', '<N/A>'), __VERSION__))
            else:
                convoy_paths.update(cache_data)
                external_convoy_paths.update(cache_data)
        except (pickle.UnpicklingError, EOFError):
            pass

    # Getting map MD5 hash
    if os.path.exists(map_name):
        map_path = map_name
    else:
        map_path = os.path.join(settings.PACKAGE_DIR, 'maps', map_name + '.map')
    if not os.path.exists(map_path):
        return None
    map_hash = get_file_md5(map_path)

    # Generating and adding to alternate cache paths
    if map_hash not in convoy_paths:
        map_object = Map(map_name, use_cache=False)
        convoy_paths[map_hash] = _build_convoy_paths_cache(map_object, max_convoy_length)
        external_convoy_paths[map_hash] = convoy_paths[map_hash]
        os.makedirs(os.path.dirname(EXTERNAL_CACHE_PATH), exist_ok=True)
        pickle.dump(external_convoy_paths, open(EXTERNAL_CACHE_PATH, 'wb'))

    # Returning
    return convoy_paths[map_hash]
def test_phase_long():
    """ Tests map.phase_long """
    this_map = deepcopy(Map())
    assert this_map.phase_long('S1901M') == 'SPRING 1901 MOVEMENT'
    assert this_map.phase_long('S1901R') == 'SPRING 1901 RETREATS'
    assert this_map.phase_long('F1901M') == 'FALL 1901 MOVEMENT'
    assert this_map.phase_long('F1901R') == 'FALL 1901 RETREATS'
    assert this_map.phase_long('W1901A') == 'WINTER 1901 ADJUSTMENTS'
    assert this_map.phase_long('s1901m') == 'SPRING 1901 MOVEMENT'
    assert this_map.phase_long('s1901r') == 'SPRING 1901 RETREATS'
    assert this_map.phase_long('f1901m') == 'FALL 1901 MOVEMENT'
    assert this_map.phase_long('f1901r') == 'FALL 1901 RETREATS'
    assert this_map.phase_long('w1901a') == 'WINTER 1901 ADJUSTMENTS'
    assert this_map.phase_long('bad') == '?????'
    assert this_map.phase_long('bad', default='Test') == 'Test'
def test_compare_phases():
    """ Tests map.compare_phases """
    this_map = deepcopy(Map())
    assert this_map.compare_phases('FORMING', 'S1901M') == -1
    assert this_map.compare_phases('COMPLETED', 'S1901M') == 1
    assert this_map.compare_phases('S1901M', 'FORMING') == 1
    assert this_map.compare_phases('S1901M', 'COMPLETED') == -1
    assert this_map.compare_phases('FORMING', 'COMPLETED') == -1
    assert this_map.compare_phases('COMPLETED', 'FORMING') == 1
    assert this_map.compare_phases('S1901M', 'S1902M') == -1
    assert this_map.compare_phases('S1902M', 'S1901M') == 1
    assert this_map.compare_phases('S1901M', 'F1901M') == -1
    assert this_map.compare_phases('F1901M', 'S1901M') == 1
    assert this_map.compare_phases('S1901?', 'S1901R') == 0
    assert this_map.compare_phases('F1901?', 'F1901R') == 0
    assert this_map.compare_phases('W1901?', 'W1901A') == 0
def test_phase_abbr():
    """ Tests map.phase_abbr """
    this_map = deepcopy(Map())
    assert this_map.phase_abbr('SPRING 1901 MOVEMENT') == 'S1901M'
    assert this_map.phase_abbr('SPRING 1901 RETREATS') == 'S1901R'
    assert this_map.phase_abbr('FALL 1901 MOVEMENT') == 'F1901M'
    assert this_map.phase_abbr('FALL 1901 RETREATS') == 'F1901R'
    assert this_map.phase_abbr('WINTER 1901 ADJUSTMENTS') == 'W1901A'
    assert this_map.phase_abbr('spring 1901 movement') == 'S1901M'
    assert this_map.phase_abbr('spring 1901 retreats') == 'S1901R'
    assert this_map.phase_abbr('fall 1901 movement') == 'F1901M'
    assert this_map.phase_abbr('fall 1901 retreats') == 'F1901R'
    assert this_map.phase_abbr('winter 1901 adjustments') == 'W1901A'
    assert this_map.phase_abbr('COMPLETED') == 'COMPLETED'
    assert this_map.phase_abbr('FORMING') == 'FORMING'
    assert this_map.phase_abbr('Bad') == '?????'
    assert this_map.phase_abbr('Bad', default='Test') == 'Test'
def test_internal_cache():
    """ Tests that all maps with an SVG are in the internal cache """
    maps = glob.glob(os.path.join(MODULE_PATH, 'maps', '*.map'))
    assert maps, 'Expected maps to be found.'
    assert os.path.exists(INTERNAL_CACHE_PATH), 'Expected internal cache to exist'

    # Checking that maps with an SVG are in the internal cache
    with open(INTERNAL_CACHE_PATH, 'rb') as cache_file:
        internal_cache = pickle.load(cache_file)
    for current_map in maps:
        map_name = current_map[current_map.rfind('/') + 1:].replace('.map', '')
        this_map = Map(map_name)
        if not this_map.svg_path:
            continue
        assert get_file_md5(current_map) in internal_cache, 'Map "%s" not found in internal cache' % map_name
        del this_map
def add_to_cache(map_name):
    """ Lazily generates convoy paths for a map and adds them to the disk cache

        :param map_name: The name of the map
        :return: The convoy_paths for that map
    """
    disk_convoy_paths = {'__version__': __VERSION__}        # Uses hash as key

    # Loading cache from disk (only if it's the correct version)
    if os.path.exists(DISK_CACHE_PATH):
        try:
            cache_data = pickle.load(open(DISK_CACHE_PATH, 'rb'))
            if cache_data.get('__version__', '') != __VERSION__:
                print('Upgrading cache from "%s" to "%s"'
                      % (cache_data.get('__version__', '<N/A>'), __VERSION__))
            else:
                disk_convoy_paths.update(cache_data)

        # Invalid pickle file - Rebuilding
        except (pickle.UnpicklingError, EOFError):
            pass

    # Getting map MD5 hash
    map_path = os.path.join(settings.PACKAGE_DIR, 'maps', map_name + '.map')
    if not os.path.exists(map_path):
        return None
    map_hash = get_file_md5(map_path)

    # Determining the depth of the search (small maps can have larger depth)
    max_convoy_length = SMALL_MAPS_CONVOY_LENGTH if map_name in SMALL_MAPS else ALL_MAPS_CONVOY_LENGTH

    # Generating and adding to alternate cache paths
    if map_hash not in disk_convoy_paths:
        map_object = Map(map_name, use_cache=False)
        disk_convoy_paths[map_hash] = build_convoy_paths_cache(map_object, max_convoy_length)
        os.makedirs(os.path.dirname(DISK_CACHE_PATH), exist_ok=True)
        pickle.dump(disk_convoy_paths, open(DISK_CACHE_PATH, 'wb'))

    # Returning
    return disk_convoy_paths[map_hash]
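# A minimal usage sketch of the function above, under the assumption that the bundled
# 'standard' map ships with the package (so map_path exists and None is not returned).
# The helper name below is hypothetical and exists only for illustration.
def _example_warm_standard_cache():
    """ Illustrative only: fetches (and lazily builds) convoy paths for the standard map. """
    convoy_paths = add_to_cache('standard')
    assert convoy_paths is not None, 'Expected maps/standard.map to exist in the package.'
    return convoy_paths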
def test_vet():
    """ Tests map.vet """
    this_map = deepcopy(Map())
    assert this_map.vet(['A B']) == [('A B', 0)]
    assert this_map.vet(['SPAIN/NC']) == [('SPAIN/NC', 1)]
    assert this_map.vet(['SPANISH']) == [('SPANISH', 1)]
    assert this_map.vet(['A']) == [('A', 2)]
    assert this_map.vet(['F']) == [('F', 2)]
    assert this_map.vet(['POR']) == [('POR', 3)]
    assert this_map.vet(['SPA']) == [('SPA', 3)]
    assert this_map.vet(['SPA/NC']) == [('SPA/NC', 4)]
    assert this_map.vet(['S']) == [('S', 5)]
    assert this_map.vet(['C']) == [('C', 5)]
    assert this_map.vet(['H']) == [('H', 5)]
    assert this_map.vet(['-']) == [('-', 6)]
    assert this_map.vet(['=']) == [('=', 6)]
    assert this_map.vet(['_']) == [('_', 6)]
    assert this_map.vet(['|']) == [('|', 7)]
    assert this_map.vet(['?']) == [('?', 7)]
    assert this_map.vet(['~']) == [('~', 7)]
    assert this_map.vet(['ZZZ'], strict=0) == [('ZZZ', 3)]
    assert this_map.vet(['ZZZ'], strict=1) == [('ZZZ', -3)]
def _load_available_maps(self):
    """ Load a dictionary (self.available_maps) mapping every map name to a dict of map info
        for all maps available in the diplomacy package.
    """
    diplomacy_map_dir = os.path.join(diplomacy.settings.PACKAGE_DIR, strings.MAPS)
    new_maps_mtime = self.maps_mtime
    for filename in os.listdir(diplomacy_map_dir):
        if filename.endswith('.map'):
            map_filename = os.path.join(diplomacy_map_dir, filename)
            map_mtime = os.path.getmtime(map_filename)
            map_name = filename[:-4]
            if map_name not in self.available_maps or map_mtime > self.maps_mtime:
                # Either it's a new map file or the map file was modified.
                available_map = Map(map_name)
                self.available_maps[map_name] = {
                    'powers': set(available_map.powers),
                    'supply_centers': set(available_map.scs),
                    'loc_type': available_map.loc_type.copy(),
                    'loc_abut': available_map.loc_abut.copy(),
                    'aliases': available_map.aliases.copy()
                }
                new_maps_mtime = max(new_maps_mtime, map_mtime)
    self.maps_mtime = new_maps_mtime
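# A minimal sketch of exercising _load_available_maps outside the real server object,
# under the assumption that the method only touches self.available_maps and self.maps_mtime.
# _MapRegistryStub and _example_list_available_maps are hypothetical, for illustration only.
class _MapRegistryStub:
    """ Stand-in exposing only the attributes the method above reads and writes. """
    def __init__(self):
        self.available_maps = {}    # map_name -> dict of map info
        self.maps_mtime = 0.        # most recent .map modification time seen

    _load_available_maps = _load_available_maps

def _example_list_available_maps():
    """ Illustrative only: loads map info through the stub and returns the map names. """
    stub = _MapRegistryStub()
    stub._load_available_maps()
    return sorted(stub.available_maps)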
def test_drop():
    """ Tests map.drop """
    this_map = deepcopy(Map())
    this_map.drop('STP')
    assert not [loc for loc in list(this_map.locs) if loc.upper().startswith('STP')]
    assert not [loc_name for (loc_name, loc) in list(this_map.loc_name.items()) if loc.startswith('STP')]
    assert not [alias for (alias, value) in list(this_map.aliases.items()) if value.startswith('STP')]
    assert not [homes for homes in list(this_map.homes.values()) if 'STP' in homes]
    assert not [units for units in list(this_map.units.values())
                for unit in units if unit[2:5] == 'STP'[:3]]
    assert not [center for center in list(this_map.scs) if center.upper().startswith('STP')]
    assert not [p_name for (p_name, scs) in this_map.centers.items()
                for center in scs if center.startswith('STP')]
    assert not [loc for loc, abuts in list(this_map.loc_abut.items())
                for there in abuts if loc.startswith('STP') or there.startswith('STP')]
    assert not [loc for loc in list(this_map.loc_type.keys()) if loc.startswith('STP')]
def test_maps():
    """ Building required maps to avoid timeout on the primary test """
    for map_name in ('ancmed', 'colonial', 'empire', 'known_world_901', 'modern', 'standard',
                     'standard_france_austria', 'standard_germany_italy', 'world'):
        Map(map_name)
def test_norm():
    """ Tests map.norm """
    this_map = deepcopy(Map())
    assert this_map.norm('abc def. ghi/jkl!-ABC|~ (Hello)') == 'ABC DEF GHI /JKL ! ABC | ~ ( HELLO )'
def test_compact():
    """ Tests map.compact """
    this_map = deepcopy(Map())
    # Power name at the top of the string is removed by Map.compact().
    assert this_map.compact('England: Fleet Western Mediterranean -> Tyrrhenian Sea. (*bounce*)') \
        == ['F', 'WES', 'TYS', '|']
def is_valid_saved_game(saved_game):
    """ Checks if the saved game is valid. This is an expensive operation because it replays the game.

        :param saved_game: The saved game (from to_saved_game_format)
        :return: A boolean that indicates if the game is valid
    """
    # pylint: disable=too-many-return-statements, too-many-nested-blocks, too-many-branches
    nb_forced_phases = 0
    max_nb_forced_phases = 1 if 'DIFFERENT_ADJUDICATION' in saved_game.get('rules', []) else 0

    # Validating default fields
    if 'id' not in saved_game or not saved_game['id']:
        return False
    if 'map' not in saved_game:
        return False
    map_object = Map(saved_game['map'])
    if map_object.name != saved_game['map']:
        return False
    if 'rules' not in saved_game:
        return False
    if 'phases' not in saved_game:
        return False

    # Validating each phase
    nb_messages = 0
    nb_phases = len(saved_game['phases'])
    last_time_sent = -1
    for phase_ix in range(nb_phases):
        current_phase = saved_game['phases'][phase_ix]
        state = current_phase['state']
        phase_orders = current_phase['orders']
        previous_phase_name = 'FORMING' if phase_ix == 0 else saved_game['phases'][phase_ix - 1]['name']
        next_phase_name = 'COMPLETED' if phase_ix == nb_phases - 1 else saved_game['phases'][phase_ix + 1]['name']
        power_names = list(state['units'].keys())

        # Validating messages
        for message in saved_game['phases'][phase_ix]['messages']:
            nb_messages += 1
            if map_object.compare_phases(previous_phase_name, message['phase']) >= 0:
                return False
            if map_object.compare_phases(message['phase'], next_phase_name) > 0:
                return False
            if message['sender'] not in power_names + ['SYSTEM']:
                return False
            if message['recipient'] not in power_names + ['GLOBAL']:
                return False
            if message['time_sent'] < last_time_sent:
                return False
            last_time_sent = message['time_sent']

        # Validating phase
        if phase_ix < (nb_phases - 1):
            is_forced_phase = False

            # Setting game state
            game = Game(saved_game['id'], map_name=saved_game['map'], rules=['SOLITAIRE'] + saved_game['rules'])
            game.set_phase_data(GamePhaseData.from_dict(current_phase))

            # Determining what phase we should expect from the dataset
            next_state = saved_game['phases'][phase_ix + 1]['state']

            # Setting orders
            game.clear_orders()
            for power_name in phase_orders:
                game.set_orders(power_name, phase_orders[power_name])

            # Validating orders
            orders = game.get_orders()
            possible_orders = game.get_all_possible_orders()
            for power_name in orders:
                if sorted(orders[power_name]) != sorted(current_phase['orders'][power_name]):
                    return False
                if 'NO_CHECK' not in game.rules:
                    for order in orders[power_name]:
                        loc = order.split()[1]
                        if order not in possible_orders[loc]:
                            return False

            # Validating resulting state
            game.process()

            # Checking phase name
            if game.get_current_phase() != next_state['name']:
                is_forced_phase = True

            # Checking zobrist hash
            if game.get_hash() != next_state['zobrist_hash']:
                is_forced_phase = True

            # Checking units
            units = game.get_units()
            for power_name in units:
                if sorted(units[power_name]) != sorted(next_state['units'][power_name]):
                    is_forced_phase = True

            # Checking centers
            centers = game.get_centers()
            for power_name in centers:
                if sorted(centers[power_name]) != sorted(next_state['centers'][power_name]):
                    is_forced_phase = True

            # Allowing 1 forced phase if DIFFERENT_ADJUDICATION is in rules
            if is_forced_phase:
                nb_forced_phases += 1
            if nb_forced_phases > max_nb_forced_phases:
                return False

    # Making sure NO_PRESS is not set
    if 'NO_PRESS' in saved_game['rules'] and nb_messages > 0:
        return False

    # The data is valid
    return True
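# A minimal usage sketch for the validator above. It assumes `to_saved_game_format`
# (the exporter referenced in the docstring) is importable from diplomacy.utils.export;
# everything else relies only on the Game API already used above. Illustrative only.
def _example_validate_exported_game():
    """ Exports a freshly processed game and runs it through is_valid_saved_game. """
    from diplomacy.utils.export import to_saved_game_format   # assumed import path

    game = Game(map_name='standard')
    game.process()                                 # one phase with no orders submitted
    saved_game = to_saved_game_format(game)        # dict in the format checked above
    return is_valid_saved_game(saved_game)         # expected to be True for an untouched game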
def test_str():
    """ Tests map.__str__ """
    this_map = deepcopy(Map())
    assert str(this_map) == this_map.name
def test_init():
    """ Creates a map """
    Map()