def problem1(file):
    """Run part 1: simulate combat until one team remains, then print the outcome."""
    lines = util.read_input("input/" + file + ".txt")
    terrain, units = parse_input(lines)
    ticks = 0
    for _ in range(10000):
        combat_over = False
        # Units act in reading order of their current positions.
        for unit in pydash.sort_by(units, POS):
            if unit not in units:
                # Unit was killed earlier in this round.
                continue
            if len({u[TEAM] for u in units}) < 2:
                # Only one team left: the current round does not complete.
                combat_over = True
                break
            move(unit, terrain, units, 3)
        if combat_over:
            break
        ticks += 1
        # print(ticks)
        # print_state(terrain, units)
    print(ticks)
    print_state(terrain, units)
    health_sum = sum(u[HEALTH] for u in units)
    print(ticks * health_sum)
async def perform_autodoor(self, context, target_autodoor):
    """Pass the worker through an autodoor: align, open, traverse, close, release."""
    worker = context.blackboard.get_worker()
    worker_location = pydash.get(worker, 'type_specific.location')
    aligns = target_autodoor['aligns']

    # The worker must already be on the same map as the autodoor resource.
    if worker_location['map'] != target_autodoor['map']:
        print(
            'perform_autodoor : does not match worker\'s current map with target resources {}'
            .format({
                'worker_map': worker_location['map'],
                'resource_map': target_autodoor['map']
            }))
        raise Exception(
            'perform_autodoor : does not match worker\'s current map with target resources'
        )

    # Lazily create the actuator and its path planner on first use.
    if self._actuator is None:
        self._actuator = Actuator(context)
        await self._actuator.init_path_planner(context)

    # TODO: check whether a routine equivalent to Promise.all is needed here.
    def annotate_distance(point):
        # Attach the planned travel distance from the worker to this align point.
        route = self._actuator.path_planner.get_path(
            target_autodoor['map'], worker_location['pose2d'], point)
        point['distance'] = self._actuator.path_planner.get_distance(route)
        return point

    ranked = pydash.sort_by(pydash.map_(aligns, annotate_distance), 'distance')
    entry_point = ranked[0]
    exit_point = ranked[-1]

    autodoor_consumer = AutodoorConsumer()

    # 1. move to entry point
    await self._actuator.moving(context, entry_point)
    # 2. request door open
    await autodoor_consumer.request_open_autodoor(target_autodoor['id'], context)
    # 3. waiting autodoor open
    await autodoor_consumer.ensure_autodoor_opened(target_autodoor['id'], context)
    # 4. move to exit point
    await self._actuator.moving(context, exit_point)
    # 5. close door
    await autodoor_consumer.request_close_autodoor(target_autodoor['id'], context)
    await autodoor_consumer.ensure_autodoor_closed(target_autodoor['id'], context)
    # 6. release autodoor resource
    await context.api_configuration.return_resource(
        worker['id'], target_autodoor['id'])
    return True
def test_get_page_iobes():
    """Check IOBES tagging of the fixture page against the expected output."""
    with open('test/fixtures/parade_page_db.json') as f:
        parade_page = json.load(f)
    with open('test/fixtures/parade_page_contexts.json') as f:
        raw_contexts = json.load(f)
    content_len = len(parade_page['content'])

    def in_bounds(mention):
        # Drop mentions whose offset falls past the end of the page content.
        return mention['offset'] < content_len

    parade_page_contexts = {
        title: [m for m in mentions if in_bounds(m)]
        for title, mentions in raw_contexts.items()
    }
    # Flatten to [title, mention] pairs, ordered by mention offset.
    context_pairs = [
        [title, mention]
        for title, mentions in parade_page_contexts.items()
        for mention in mentions
    ]
    contexts = sorted(context_pairs, key=lambda pair: pair[1]['offset'])
    mentions = [pair[1] for pair in contexts]
    mention_link_titles = [pair[0] for pair in contexts]
    assert parade_iobes == iobes.get_page_iobes(parade_page, mentions, mention_link_titles)
def get_countries_data_of(self, continent_name):
    """Return GWOD country records for one continent, sorted by country name.

    Raises NameError when no continent name is given.
    """
    # Parameters validation.
    if not continent_name:
        raise NameError(
            'You must provide a continent name in order to use the get_countries_data_of() method.')
    # Sanitizing continent name
    continent_name = self._sanitize_continent_name(continent_name)
    wanted = continent_name.strip()
    matches = [country for country in self._GWOD
               if country['Continent Name'] == wanted]
    # Ordered alphabetically (stdlib sorted is equivalent to pydash.sort_by here).
    return sorted(matches, key=lambda country: country['Country Name'])
def get_continents(self):
    """Return unique continent records ({code, name}), sorted by continent name."""
    # 1. Keep only the continent fields of each GWOD row.
    # 2. Deduplicate by continent code (first occurrence wins).
    # 3. Order the result by continent name.
    unique_by_code = {}
    for row in self._GWOD:
        picked = {key: row[key]
                  for key in ('Continent Code', 'Continent Name')
                  if key in row}
        unique_by_code.setdefault(picked.get('Continent Code'), picked)
    return sorted(unique_by_code.values(),
                  key=lambda continent: continent.get('Continent Name'))
def print_state(terrain, units):
    """Render the terrain with units overlaid, plus per-row unit stats."""
    grid = [row.copy() for row in terrain]
    per_row = {}
    for unit in units:
        (y, x) = unit[POS]
        grid[y][x] = unit[TEAM]
        per_row.setdefault(y, []).append(unit)
    for y, row in enumerate(grid):
        print("".join(row), end=" ")
        # Units on this row, listed in reading order with their health.
        for unit in pydash.sort_by(per_row.get(y, []), POS):
            print(f"{unit[TEAM]}({unit[HEALTH]})", end=" ")
        print()
def no_loss_battle(file, attack_power):
    """Simulate combat with the given elf attack power.

    Returns False as soon as any elf dies; otherwise returns the final
    (terrain, units, ticks) once one team remains.
    """
    print("testing attack", attack_power)
    lines = util.read_input("input/" + file + ".txt")
    terrain, units = parse_input(lines)
    elves = sum(1 for u in units if u[TEAM] == 'E')
    ticks = 0
    for _ in range(10000):
        combat_over = False
        for unit in pydash.sort_by(units, POS):
            if unit not in units:
                # Killed earlier in this round.
                continue
            if len({u[TEAM] for u in units}) < 2:
                combat_over = True
                break
            power = attack_power if unit[TEAM] == 'E' else 3
            move(unit, terrain, units, power)
            if sum(1 for u in units if u[TEAM] == 'E') < elves:
                # An elf died: this attack power is not high enough.
                return False
        if combat_over:
            break
        ticks += 1
        # print(ticks)
        # print_state(terrain, units)
    return terrain, units, ticks
def get_countries_data_by_currency(self, currency_name, continent_name=False):
    """Return GWOD country records using a currency, sorted by country name.

    When continent_name is given, results are further restricted to that
    continent. Raises NameError when no currency name is given.
    """
    # Parameters validation.
    if not currency_name:
        raise NameError(
            'You must provide a currency name in order to use the get_countries_data_by_currency() method.')
    # Setting up conditions.
    conditions = {'Currency Name': currency_name.strip()}
    # Checking if a continent name has been provided.
    if continent_name:
        # Sanitizing input fields.
        continent_name = self._sanitize_continent_name(continent_name)
        conditions['Continent Name'] = continent_name.strip()
    # Keep rows satisfying every condition, then order alphabetically.
    countries = [row for row in self._GWOD
                 if all(row.get(field) == value
                        for field, value in conditions.items())]
    return sorted(countries, key=lambda row: row['Country Name'])
def test_sort_by(case, expected):
    """Parametrized: _.sort_by(*case) must produce the expected ordering."""
    actual = _.sort_by(*case)
    assert actual == expected
def get_countries(self):
    """Return every GWOD country record, sorted alphabetically by country name."""
    # sorted() returns a fresh list, leaving self._GWOD untouched.
    return sorted(self._GWOD, key=lambda country: country['Country Name'])
async def perform_narrow_corridor(self, context, target_resource):
    """Drive the worker through a narrow-corridor resource.

    Aligns to the nearest align point, traverses to the farthest one, then
    returns the occupied resource slot (with retries). Returns True on
    success, False when the slot could not be returned after MAX_RETRY tries.
    Raises Exception when the worker is not on the resource's map.
    """
    worker = context.blackboard.get_worker()
    worker_location = pydash.get(worker, 'type_specific.location')

    # In case of using narrow corridor
    # 1. calculate distances between robot and align points
    aligns = target_resource['aligns']
    if worker_location['map'] != target_resource['map']:
        # FIX: corrected log-message typo ("mathc" -> "match").
        print(
            'perform_narrow_corridor: does not match worker\'s current map with target resources',
            {
                'worker_map': worker_location['map'],
                'resource_map': target_resource['map']
            })
        raise Exception(
            'perform_narrow_corridor: does not match worker\'s current map with target resources worker_map:{}, resource_map: {}'
            .format(worker_location['map'], target_resource['map']))

    # Lazily create the actuator and its path planner on first use.
    if self._actuator is None:
        self._actuator = Actuator(context)
        await self._actuator.init_path_planner(context)

    # TODO: check whether a routine equivalent to Promise.all is needed here.
    def cb(point):
        # Attach the planned travel distance from the worker to this point.
        path = self._actuator.path_planner.get_path(
            target_resource['map'], worker_location['pose2d'], point)
        point['distance'] = self._actuator.path_planner.get_distance(path)
        return point

    aligns = pydash.map_(aligns, cb)
    aligns = pydash.sort_by(aligns, 'distance')
    entry = aligns[0]
    exit_point = aligns[-1]

    # 2. move to nearest align point
    await self._actuator.moving(context, entry)
    # 3. move to farther align point
    await self._actuator.moving(context, exit_point)

    # 4. return occupied resource slot
    MAX_RETRY = 100
    REQUEST_INTERVAL = 100
    worker_id = worker['id']
    waiter = Waiter(REQUEST_INTERVAL)
    for i in range(MAX_RETRY):
        print('return occupied resource {}/{}'.format(i + 1, MAX_RETRY))
        target_slots = target_resource['resource_slots']
        occupied = pydash.find(target_slots, {
            'user_id': worker_id,
            'status': 'occupied'
        })
        try:
            await context.api_configuration.return_resource(
                worker_id, target_resource['id'], occupied['id'])
            return True
        except Exception as err:
            # FIX: actually log the caught error instead of dropping it.
            print('failed to return resource with error', err)
            await waiter.wait()
    print('exceed maximum try to return occupied resource')
    return False
def convert_csv(path):
    # Convert an EA-exported CSV application profile into an INI-style text.
    # Returns [result_text, package_name] where package_name is the CSV's
    # 'Package' row EA-Name with the 'OSLO-' prefix stripped.
    #
    # NOTE(review): this body was reconstructed from whitespace-mangled
    # source; indentation is a best guess - verify against the original file.
    ap = []
    result = ""
    with open(path) as csvfile:
        # Sniff delimiter/quoting from the first 4 KiB of the file.
        dialect = csv.Sniffer().sniff(csvfile.read(4096))
        dialect.doublequote = True
        csvfile.seek(0)
        reader = csv.reader(csvfile, dialect)
        header = False
        for row in reader:
            if not header:
                # First row is the header; subsequent rows become dicts keyed by it.
                header = row
            else:
                item = {}
                for i in range(0, len(row)):
                    item[header[i]] = row[i]
                ap.append(item)
    # Distinct non-empty EA-Domain values, minus enumeration (codelist) names.
    domains = pydash.without(pydash.uniq(pydash.map_(ap, 'EA-Domain')), '', None)
    codelists = pydash.filter_(ap, {'EA-Type': 'ENUMERATION'})
    domains = list(set(domains) - set(pydash.map_(codelists.copy(), 'EA-Name')))
    domains.sort()
    final_domains = []
    final_datypes = []
    classes = pydash.filter_(ap, {'EA-Type': 'CLASS'}) + pydash.filter_(ap, {'EA-Type': 'DATATYPE'})
    # NOTE(review): 'datatypes' and 'classes_only' are computed but never used below.
    datatypes = pydash.map_(pydash.filter_(ap, {'EA-Type': 'DATATYPE'}), 'EA-Name')
    classes_only = pydash.map_(pydash.filter_(ap, {'EA-Type': 'CLASS'}), 'EA-Name')
    attributes = pydash.filter_(ap, {'EA-Type': 'attribute'}) + pydash.filter_(ap, {'EA-Type': 'connector'})
    attributes = pydash.sort_by(attributes, 'EA-Domain')
    # for enumeration in codelists:
    #     attributes = pydash.remove(attributes, {'EA-Domain': enumeration})
    title = os.path.splitext(os.path.basename(path))[0]
    package = pydash.find(ap, {'EA-Type': 'Package'})
    if len(domains) > 0:
        for domain in domains:
            klassen = pydash.filter_(classes, {'EA-Name': domain})
            if 0 < len(klassen) <= 1:
                # Exactly one class/datatype carries this domain name:
                # emit a section named after the domain itself.
                klasse = pydash.find(classes, {'EA-Name': domain})
                if klasse['EA-Type'] == 'DATATYPE':
                    result += "\n[%s]\n" % domain
                    final_datypes.append(domain)
                else:
                    result += "\n[%s]\n" % domain
                    final_domains.append(domain)
                # NOTE(review): klasse was already dereferenced above, so this
                # None-guard cannot help if find() returned None.
                if klasse is not None:
                    result += 'ap-definition-nl=%s\n' % klasse['ap-definition-nl']
                    result += 'ap-usagenote-nl=%s\n' % klasse['ap-usageNote-nl']
                    result += 'namespace=%s\n' % klasse['namespace']
                    result += 'localname=%s\n' % klasse['localname']
                # Attributes/connectors attached to this domain, matched by EA-Name.
                domain_attributes = pydash.filter_(attributes, {'EA-Domain': domain})
                domain_attribute_names = pydash.without(pydash.uniq(pydash.map_(domain_attributes, 'EA-Name')), '', None)
                # localname
                result += 'attributes=%s\n' % ','.join(domain_attribute_names)
                for attr_name in domain_attribute_names:
                    result += "\n[%s:%s]\n" % (domain, attr_name)
                    attr = pydash.find(domain_attributes, {'EA-Name': attr_name})
                    # SKOS Concept ranges get their codelist URI attached.
                    if attr['range'] == "http://www.w3.org/2004/02/skos/core#Concept":
                        ap_codelist = pydash.find(codelists, {'EA-Name': attr['EA-Range']})
                        if not ap_codelist is None:
                            attr['ap-codelist'] = ap_codelist['ap-codelist']
                    for key in attr:
                        # NOTE(review): replace('&', '&') is a no-op; the intent was
                        # probably an HTML-entity (un)escape - verify.
                        result += '%s=%s\n' % (key, attr[key].replace('&', '&'))
            elif len(klassen) > 1:
                # Several classes share the domain name: key the sections by
                # ap-label-nl and match attributes by owning-class GUID instead.
                for klasse in klassen:
                    if klasse['ap-label-nl'] == "":
                        klasse['ap-label-nl'] = domain
                    if klasse['EA-Type'] == 'DATATYPE':
                        result += "\n[%s]\n" % klasse['ap-label-nl']
                        final_datypes.append(klasse['ap-label-nl'])
                    else:
                        result += "\n[%s]\n" % klasse['ap-label-nl']
                        final_domains.append(klasse['ap-label-nl'])
                    if klasse is not None:
                        result += 'ap-definition-nl=%s\n' % klasse['ap-definition-nl']
                        result += 'ap-usagenote-nl=%s\n' % klasse['ap-usageNote-nl']
                        result += 'namespace=%s\n' % klasse['namespace']
                        result += 'localname=%s\n' % klasse['localname']
                    domain_attributes = pydash.filter_(attributes, {'EA-Domain-GUID': klasse['EA-GUID']})
                    domain_attribute_names = pydash.without(pydash.uniq(
                        pydash.map_(domain_attributes, 'localname')), '', None)
                    result += 'attributes=%s\n' % ','.join(
                        domain_attribute_names)
                    for attr_name in domain_attribute_names:
                        result += "\n[%s:%s]\n" % (klasse['ap-label-nl'], attr_name)
                        attr = pydash.find(domain_attributes, {'localname': attr_name})
                        if attr[
                                'range'] == "http://www.w3.org/2004/02/skos/core#Concept":
                            ap_codelist = pydash.find(codelists, {
                                'EA-Name': attr['EA-Range']})
                            if not ap_codelist is None:
                                attr['ap-codelist'] = ap_codelist[
                                    'ap-codelist']
                        for key in attr:
                            # NOTE(review): unlike the single-class branch, no
                            # '&' replace here - verify which is intended.
                            result += '%s=%s\n' % (key, attr[key])
    result += "\n[overview]\n"
    final_domains = list(set(final_domains))
    final_domains.sort()
    result += 'entities=%s\n' % ','.join(final_domains)
    result += 'dtypes=%s\n' % ','.join(final_datypes)
    if package is not None:
        result += 'package=%s\n' % package['EA-Name'].replace('OSLO-', '')
    result += 'title=%s\n' % title
    # NOTE(review): package is dereferenced unguarded here; a CSV without a
    # 'Package' row would raise TypeError on return.
    return [result, package['EA-Name'].replace('OSLO-', '')]
def fetch(filters, queries, locations, numberOfPages, maxRate, numBedrooms=None, tries=5): # base url for the craigslist catogory base_url = 'http://losangeles.craigslist.org/search/apa?' api_key="6ry0OW6wJFglXoNKrDSaxOSbeni9i9hlvQ8AeTSwy3qmfzNd2w0LdzLWSBYt5RADq+OKUF840wRzj7/HWBLMJQ==" pageInc = 100 apts = [] for query in queries: for page in range(numberOfPages): params = list(filters) params.append(('query',query)) params.append(('s',page*pageInc)) plainUrl = base_url + urllib.urlencode(params,'') url = urllib.quote(plainUrl) # Setup your REST GET request URL here getUrl = 'https://api.import.io/store/data/ae9b3481-fd34-4f31-88dc-ab2c18edde46/_query?input/webpage/url='+url+'&_user=43864eeb-fab1-4163-94ab-29ce26a543e5&_apikey='+urllib.quote(api_key,'') print 'FETCHING:' print '' print 'search:', query print '' print 'paging:', str(page*pageInc) + "-" + str((page+1)*pageInc) print '' print 'craigslist url:', plainUrl print '' print 'import.io API url:', getUrl print '' noResponse = True t = 0 response = '' data = {} while ('results' not in data) and (t < tries): response = urllib.urlopen(getUrl).read() data = json.loads(response) t = t+1 if 'results' in data: results = data['results'] print str(len(results)) + ' results' for result in results: # Gather the information you want from your API request if all(key in result for key in ['title/_text', 'title', 'price', 'bedrooms', 'location']): title = result['title/_text'] url = result['title'] price = float(result['price'].replace(',','').replace('$','')) bedrooms = float(result['bedrooms'].replace('br','')) location = result['location'].lower() apt = {'title':title, 'url':url, 'price':price, 'bedrooms':bedrooms, 'ratio':price/bedrooms, 'location':location} apts.append(apt) else: print 'FAILURE' print data print '' print '-'*79 print '' def validLoaction(string): found = map(lambda loc: string.find(loc) != -1, locations) if 1 in found: return True else: return False totalResults = len(apts) if numBedrooms: apts = 
pydash.select(apts, lambda x: x['bedrooms'] == 1.) # sort based on ratio sortedApts = pydash.sort_by(apts, lambda x: x['ratio']) # filter based on ratio filteredApts = pydash.select(sortedApts, lambda x: x['ratio'] <= maxRate and x['ratio'] > 1) # filter location strings locationApts = pydash.select(filteredApts, lambda x: validLoaction(x['location'])) # only show the unique results! uniqApts = pydash.uniq(locationApts) return uniqApts
import pydash

# Tuple field indices for a parsed boarding pass.
ROW = 0
COL = 1
ID = 2


def parse_line(s):
    """Decode a boarding-pass string (F/B, L/R as binary) into (row, col, seat_id)."""
    bits = s.replace('F', '0').replace('B', '1').replace('L', '0').replace('R', '1')
    row = int(bits[:7], 2)
    col = int(bits[7:], 2)
    return row, col, row * 8 + col


with open('input.txt') as fh:
    passes = [parse_line(line.strip()) for line in fh.readlines()]

# Part 1: the highest seat id on any pass.
print(pydash.max_by(passes, ID))

# Part 2: the single missing id flanked by two occupied seats.
spasses = pydash.sort_by(passes, ID)
for lower, upper in zip(spasses, spasses[1:]):
    if upper[ID] - lower[ID] == 2:
        print(lower[ID] + 1)
        break