def port():
    admiral = g.admiral
    response = {}

    # TODO: Log entry
    response['api_log'] = [{
        "api_state": "0",
        "api_no": 0,
        "api_type": "1",
        "api_message": "ayy lmao"
    }]

    # Background music?
    response["api_p_bgm_id"] = 100

    # This sets the parallel quest count. Don't know what higher values do, default is 5.
    # I set it to ten because f**k the police
    response["api_parallel_quest_count"] = 10

    # Combined flag? Event data probably.
    response["api_combined_flag"] = 0

    # API basic - a replica of api_get_member/basic
    response['api_basic'] = util.merge_two_dicts(MemberHelper.basic(), {
        'api_medals': 0,
        'api_large_dock': 0
    })

    # Fleets.
    response['api_deck_port'] = [MemberHelper.fleet(fleet) for fleet in admiral.fleets]

    # Materials.
    response['api_material'] = MemberHelper.material()

    response['api_ship'] = [MemberHelper.kanmusu(kanmusu) for kanmusu in admiral.kanmusu if kanmusu.active]

    # Generate ndock.
    response['api_ndock'] = MemberHelper.rdock()

    return response
def enrich_card(card_data):
    """Enrich a card with the color cost data"""
    enriched_card = deepcopy(card_data)
    enriched_card['_index'] = INDEX_NAME
    mana_costs = get_color_mana_costs(card_data['_source']['manaCost'])
    mana_costs.update(get_generic_cost(card_data, mana_costs))
    enriched_card['_source'].update(mana_costs)
    return merge_two_dicts(enriched_card, mana_costs)
def calc_api(local_geojson, valid_adm2_tuples):
    resp_list = []

    with open(local_geojson) as thefile:
        data = json.load(thefile)

    url = 'https://0yvx7602sb.execute-api.us-east-1.amazonaws.com/dev/umd-loss-gain'

    for feat in data['features']:
        props = feat['properties']

        if (props['iso'], int(props['id_1']), int(props['id_2'])) in valid_adm2_tuples:
            print feat['properties']

            payload = {'geojson': {'features': [feat]}}
            params = {'aggregate_values': False}

            r = requests.post(url, json=payload, params=params)

            valid_zstats = False
            # initialize so the error handler can log it even if r.json() fails
            resp = None

            try:
                resp = r.json()
                data = resp['data']['attributes']
                valid_zstats = True

            # catch JSON server error response, also non-JSON response
            except (simplejson.JSONDecodeError, KeyError):
                logging.error('invalid JSON response from Lambda API:')
                logging.error(resp)
                logging.error(feat['properties'])

            if valid_zstats:
                print data

                for loss_year, loss_val in data['loss'].iteritems():
                    resp_dict = {
                        'year': loss_year,
                        'area_loss': loss_val,
                        'area_gain': data['gain'],
                        'area_extent_2000': data['treeExtent'],
                        'area_poly_aoi': data['areaHa']
                    }
                    row = util.merge_two_dicts(feat['properties'], resp_dict)
                    resp_list.append(row)

    return pd.DataFrame(resp_list)
def run(args):
    es = es_client()

    # don't hate me because I'm mutable
    INDEX_MAPPINGS['mappings']['cards']['properties'].update(NEW_FIELDS)

    if es.indices.exists(INDEX_NAME):
        es.indices.delete(index=INDEX_NAME)

    idx = merge_two_dicts(INDEX_MAPPINGS, INDEX_SETTINGS)
    es.indices.create(index=INDEX_NAME, body=idx)

    card_iter = helpers.scan(es, index='mtg')
    helpers.bulk(es, enriched_card_iter(card_iter))
def assignment2_part3c(deleted_commits, added_commits):
    """
    Assignment part 3c: frequently identified commit as the VCC
    :param deleted_commits: a dictionary of the frequently identified commits for the deleted rows.
                            Keys: git hashes, value: number of occurrences
    :param added_commits: a dictionary of the frequently identified commits for the added rows.
                          Keys: git hashes, value: number of occurrences
    :return: string of the hash of the vcc
    """
    print("\nPart 3.c -- frequently identified commit as the VCC ---------------------")
    frequently_identified_commits = util.merge_two_dicts(deleted_commits, added_commits)
    vcc = max(frequently_identified_commits, key=frequently_identified_commits.get)
    print(vcc + ' (identified in ' + str(frequently_identified_commits[vcc]) + ' rows)')
    return vcc
def port(): admiral = g.admiral response = {"api_data": {}} # TODO: Log entry response["api_data"]['api_log'] = [{ "api_state": "0", "api_no": 0, "api_type": "1", "api_message": "ayy lmao" }] # Background music? response["api_data"]["api_p_bgm_id"] = 100 # This sets the parallel quest count. Don't know what higher values do, default is 5. # I set it to ten because f**k the police response["api_data"]["api_parallel_quest_count"] = 10 # Combined flag? Event data probably. response["api_data"]["api_combined_flag"] = 0 # API basic - a replica of api_get_member/basic response['api_data']['api_basic'] = util.merge_two_dicts(basic(), { 'api_medals': 0, 'api_large_dock': 0 }) response['api_data']['api_deck_port'] = [] # Fleets. for fleet in admiral.fleets: fleet_members = [kanmusu.number + 1 for kanmusu in fleet.kanmusu if kanmusu is not None] temp_dict = { # Unknown value, always zero for some reason. 'api_flagship': 0, # The Admiral ID, presumably. 'api_member_id': admiral.id, # The name of the fleet. 'api_name': fleet.name, # Unknown value, always empty. 'api_name_id': "", # The local fleet ID. 'api_id': fleet.number, # List of ships. "api_ship": fleet_members + [-1] * (6 - len(fleet_members)), # Mission data? "api_mission": [0, 0, 0, 0] } response['api_data']['api_deck_port'].append(temp_dict) # Materials. materials = admiral.resources.to_list() materials.append(admiral.get_usables(NAME_BUCKET).quantity) materials.append(admiral.get_usables(NAME_FLAME).quantity) materials.append(admiral.get_usables(NAME_MATERIAL).quantity) materials.append(admiral.get_usables(NAME_SCREW).quantity) response['api_data']['api_material'] = materials response['api_data']['api_ship'] = [ShipHelper.kanmusu_data(kanmusu) for kanmusu in admiral.kanmusu if kanmusu.active] # Generate ndock. response['api_data']['api_ndock'] = _DockHelper.rdock() return response
def merge(self, reference_per_document):
    self.references = util.merge_two_dicts(self.references, reference_per_document.references)
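# Note: none of the snippets above or below include the merge_two_dicts helper
# itself. The following is only a minimal sketch of what util.merge_two_dicts
# presumably does, inferred from how it is called in these examples (a
# non-destructive merge where values from the second dict win); the actual
# implementation in each project may differ.
def merge_two_dicts(x, y):
    """Return a new dict containing x's items updated with y's, leaving both inputs unmodified."""
    merged = x.copy()
    merged.update(y)
    return merged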
def generate_port(api_token): # First, get the admiral. admiral = util.get_token_admiral_or_error(api_token) assert isinstance(admiral, Admiral) # Initial KanColle reply. port2 = {"api_data": {}} # TODO: Log entry port2["api_data"]['api_log'] = [{ "api_state": "0", "api_no": 0, "api_type": "1", "api_message": "ayy lmao" }] # Background music? port2["api_data"]["api_p_bgm_id"] = 100 # This sets the parallel quest count. Don't know what higher values do, default is 5. # I set it to ten because f**k the police port2["api_data"]["api_parallel_quest_count"] = 10 # Combined flag? Event data probably. port2["api_data"]["api_combined_flag"] = 0 # API basic - a replica of api_get_member/basic basic = AdmiralHelper.get_admiral_basic_info() port2['api_data']['api_basic'] = util.merge_two_dicts( basic, { 'api_medals': 0, 'api_large_dock': 0 }) port2['api_data']['api_deck_port'] = [ ] #AdmiralHelper.get_admiral_deck_api_data(admiral) count = 0 # Sort the admiral ships list. Not even sure if this is needed... # Fleets. for fleet in admiral.fleets.all(): count += 1 ships = [ ship.local_ship_num + 1 for ship in fleet.ships.all() if ship is not None ] temp_dict = { # Unknown value, always zero for some reason. 'api_flagship': 0, # The Admiral ID, presumably. 'api_member_id': admiral.id, # The name of the fleet. 'api_name': fleet.name, # Unknown value, always empty. 'api_name_id': "", # The local fleet ID. 'api_id': count, # List of ships. "api_ship": ships + [-1] * (6 - len(ships)), # Mission data? "api_mission": [0, 0, 0, 0] } port2['api_data']['api_deck_port'].append(temp_dict) # Materials. port2['api_data'][ 'api_material'] = AdmiralHelper.get_admiral_resources_api_data(admiral) port2['api_data']['api_ship'] = [] # Ship data, Luckily this is generated for us by a helper class. admiral_ships = sorted(admiral.admiral_ships.all(), key=lambda x: x.local_ship_num) for num, ship in enumerate(admiral_ships): if not ship.active: continue assert isinstance(ship, AdmiralShip) port2['api_data']['api_ship'].append( ShipHelper.get_admiral_ship_api_data(ship.id)) # Generate ndock. port2['api_data']['api_ndock'] = DockHelper.generate_dock_data( admiral)['rdock'] return port2
def join_to_api_df(df):
    # replace common polyname suffix
    df.polyname = df['polyname'].apply(
        lambda x: x.replace('_int_diss_gadm28_large.tsv', ''))

    # clean up other formatting irregularities
    df.loc[~df['polyname'].str.contains(r'plantation|biome'), ['bound1', 'bound2']] = '-9999'
    df[['bound3', 'bound4']] = '-9999'

    tsv_polyname = df.polyname.unique()[0]
    valid_polynames = get_api_polynames()

    # reverse any ifl or plantations intersections
    # so that ifl/plantation polyname is in front
    if '__' in tsv_polyname and tsv_polyname[0:4] not in ('ifl_', 'plan'):
        split_poly = tsv_polyname.split('__')
        tsv_polyname = '__'.join([split_poly[1], split_poly[0]])

    # use fuzzy matching to guess proper match
    matched_polyname, score = process.extractOne(tsv_polyname, valid_polynames)
    logging.info('{} corrected to {}'.format(tsv_polyname, matched_polyname))

    # update polyname field for joining, save original polyname
    df.polyname = matched_polyname
    df['tsv_polyname'] = tsv_polyname

    iso_str = "', '".join(df.iso.unique())
    id_1_str = ', '.join(df.id_1.unique().astype(str))
    id_2_str = ', '.join(df.id_2.unique().astype(str))

    # need to do some kind of lookup here to go from
    # input polyname to polynames used here:
    # https://production-api.globalforestwatch.org/v1/query/499682b1-3174-493f-ba1a-368b4636708e?sql=SELECT%20polyname,%20count(*)%20FROM%20data%20GROUP%20BY%20polyname
    sql = ("SELECT * FROM data WHERE "
           "polyname = '{}' AND "
           "thresh = 30 AND "
           "iso in ('{}') AND adm1 in ({}) "
           "AND adm2 in ({}) ").format(matched_polyname, iso_str, id_1_str, id_2_str)
    logging.info(sql)

    dataset_url = 'https://production-api.globalforestwatch.org/v1/query/499682b1-3174-493f-ba1a-368b4636708e'
    r = requests.get(dataset_url, params={'sql': sql})
    resp = r.json()['data']

    row_list = []

    for grouped_row in resp:
        base_data = grouped_row.copy()
        del base_data['year_data']

        for year_dict in grouped_row['year_data']:
            row = util.merge_two_dicts(base_data, year_dict)
            row_list.append(row)

    api_df = pd.DataFrame(row_list)

    # bound3 and bound4 not used currently
    api_df[['bound3', 'bound4']] = '-9999'

    # match API column names, add thresh
    df = df.rename(columns={'id_1': 'adm1', 'id_2': 'adm2'})
    df['thresh'] = 30

    for field_name in ['year', 'adm1', 'adm2']:
        df[field_name] = df[field_name].replace('', -9999)
        df[field_name] = df[field_name].astype(int)

    for field_name in ['bound1', 'bound2']:
        df[field_name] = df[field_name].replace('', '-9999')
        df[field_name] = df[field_name].astype(unicode)
        api_df[field_name] = api_df[field_name].astype(unicode)

    field_list = ['polyname', 'bound1', 'bound2', 'bound3', 'bound4',
                  'iso', 'adm1', 'adm2', 'thresh', 'year']

    merged = pd.merge(df, api_df, how='left', on=field_list, suffixes=['_zstats', '_hadoop'])

    return merged
def generate_port(api_token):
    # First, get the admiral.
    admiral = util.get_token_admiral_or_error(api_token)
    assert isinstance(admiral, Admiral)

    # Initial KanColle reply.
    port2 = {
        "api_data": {}
    }

    # TODO: Log entry
    port2["api_data"]['api_log'] = [
        {
            "api_state": "0",
            "api_no": 0,
            "api_type": "1",
            "api_message": "ayy lmao"
        }
    ]

    # Background music?
    port2["api_data"]["api_p_bgm_id"] = 100

    # This sets the parallel quest count. Don't know what higher values do, default is 5.
    # I set it to ten because f**k the police
    port2["api_data"]["api_parallel_quest_count"] = 10

    # Combined flag? Event data probably.
    port2["api_data"]["api_combined_flag"] = 0

    # API basic - a replica of api_get_member/basic
    basic = AdmiralHelper.get_admiral_basic_info()
    port2['api_data']['api_basic'] = util.merge_two_dicts(basic, {
        'api_medals': 0,
        'api_large_dock': 0
    })

    port2['api_data']['api_deck_port'] = []  # AdmiralHelper.get_admiral_deck_api_data(admiral)
    count = 0

    # Sort the admiral ships list. Not even sure if this is needed...
    # Fleets.
    for fleet in admiral.fleets.all():
        count += 1
        ships = [ship.local_ship_num + 1 for ship in fleet.ships.all() if ship is not None]
        temp_dict = {
            # Unknown value, always zero for some reason.
            'api_flagship': 0,
            # The Admiral ID, presumably.
            'api_member_id': admiral.id,
            # The name of the fleet.
            'api_name': fleet.name,
            # Unknown value, always empty.
            'api_name_id': "",
            # The local fleet ID.
            'api_id': count,
            # List of ships.
            "api_ship": ships + [-1] * (6 - len(ships)),
            # Mission data?
            "api_mission": [0, 0, 0, 0]
        }
        port2['api_data']['api_deck_port'].append(temp_dict)

    # Materials.
    port2['api_data']['api_material'] = AdmiralHelper.get_admiral_resources_api_data(admiral)

    port2['api_data']['api_ship'] = []

    # Ship data, Luckily this is generated for us by a helper class.
    admiral_ships = sorted(admiral.admiral_ships.all(), key=lambda x: x.local_ship_num)
    for num, ship in enumerate(admiral_ships):
        if not ship.active:
            continue
        assert isinstance(ship, AdmiralShip)
        port2['api_data']['api_ship'].append(ShipHelper.get_admiral_ship_api_data(ship.id))

    # Generate ndock.
    port2['api_data']['api_ndock'] = DockHelper.generate_dock_data(admiral)['rdock']

    return port2
def generate_distributed_synchronized_controllers():
    policy = {}
    policy["S1"] = "pt = 2 . pt <- 4"
    policy["S2"] = "pt = 12 . pt <- 14"
    policy["S3"] = "zero"
    policy["S4"] = "zero"
    policy["S5"] = "pt = 6 . pt <- 7"
    policy["S6"] = "pt = 8 . pt <- 10"

    flow_tables = {}
    flow_tables["S1"] = []
    flow_tables["S2"] = []
    flow_tables["S3"] = ["pt = 1 . pt <- 3"]
    flow_tables["S4"] = ["pt = 11 . pt <- 13"]
    flow_tables["S5"] = ["pt = 5 . pt <- 7"]
    flow_tables["S6"] = ["pt = 8 . pt <- 9"]

    topology = "((pt = 3 . pt <- 5) + (pt = 4 . pt <- 6) + (pt = 7 . pt <- 8) + (pt = 9 . pt <- 11) + (pt = 10 . pt <- 12) + (pt = 13 . pt <- 15) + (pt = 14 . pt <- 16))"

    channels = ["upS1", "upS2", "upS3", "upS4", "upS5", "upS6", "syn"]

    switch_rec_vars = calculate_recursive_variables(policy, topology, flow_tables)

    controllers = {}
    controllers["C1"] = '((upS1 ! "zero") ; ((syn ! "one") ; ((upS3 ! "{}") ; ((upS5 ! "{}") ; bot))))'.format(
        flow_tables["S3"][0], flow_tables["S5"][0])
    controllers["C2"] = '((upS2 ! "zero") ; ((syn ? "one") ; ((upS4 ! "{}") ; ((upS6 ! "{}") ; bot))))'.format(
        flow_tables["S4"][0], flow_tables["S6"][0])

    recursive_variables = merge_two_dicts(controllers, switch_rec_vars)

    in_packets = {"H1_to_H4": "(pt = 2)", "H3_to_H2": "(pt = 1)"}
    out_packets = {"H1_to_H4": "(pt = 15)", "H3_to_H2": "(pt = 16)"}

    all_rcfgs = []
    all_rcfgs.append('rcfg(upS1, "zero")')
    all_rcfgs.append('rcfg(upS2, "zero")')
    all_rcfgs.append('rcfg(upS3, "{}")'.format(flow_tables["S3"][0]))
    all_rcfgs.append('rcfg(upS4, "{}")'.format(flow_tables["S4"][0]))
    all_rcfgs.append('rcfg(upS5, "{}")'.format(flow_tables["S5"][0]))
    all_rcfgs.append('rcfg(upS6, "{}")'.format(flow_tables["S6"][0]))
    all_rcfgs.append('rcfg(syn, "one")')

    properties = {
        "H1_to_H4": [("r", "(head(@Program))", "=0", 2),
                     ("r", '(head(tail(@Program, {{ {} }})))'.format(' , '.join(all_rcfgs)), "=0", 3)],
        "H3_to_H2": [("r", "(head(@Program))", "=0", 2),
                     ("r", '(head(tail(@Program, {{ {} }})))'.format(' , '.join(all_rcfgs)), "=0", 3)]
    }

    data = OrderedDict()
    data['module_name'] = "DISTRIBUTED-CONTROLLER-SYNCHRONIZED"
    data['recursive_variables'] = recursive_variables
    data['program'] = "SDN-1 || C1 || C2"
    data['channels'] = channels
    data['in_packets'] = in_packets
    data['out_packets'] = out_packets
    data['properties'] = properties

    return data
def generate_port(api_token):
    # First, get the admiral.
    admiral = util.get_token_admiral_or_error(api_token)
    assert isinstance(admiral, db.Admiral)

    # Initial KanColle reply.
    port2 = {
        "api_result_msg": "成功",
        "api_result": 1,
        "api_data": {}
    }

    # TODO: Log entry
    port2["api_data"]['api_log'] = [
        {
            "api_state": "0",
            "api_no": 0,
            "api_type": "1",
            "api_message": "ayy lmao"
        }
    ]

    # Background music?
    port2["api_data"]["api_p_bgm_id"] = 100

    # This sets the parallel quest count. Don't know what higher values do, default is 5.
    # port2["api_data"]["api_parallel_quest_count"] = 5

    # Combined flag? Event data probably.
    port2["api_data"]["api_combined_flag"] = 0

    # API basic - a replica of api_get_member/basic
    basic = AdmiralHelper.get_admiral_basic_info()
    port2['api_data']['api_basic'] = util.merge_two_dicts(basic, {
        'api_medals': 0,
        'api_large_dock': 0
    })

    port2['api_data']['api_deck_port'] = []
    count = 0

    admiral_ships = sorted(admiral.admiral_ships.all(), key=lambda x: x.local_ship_num)

    # Fleets.
    for fleet in admiral.fleets.all():
        count += 1
        ships = [ship.local_ship_num + 1 for ship in fleet.ships.all() if ship is not None]
        temp_dict = {
            # Unknown value, always zero for some reason.
            'api_flagship': 0,
            # The Admiral ID, presumably.
            'api_member_id': admiral.id,
            # The name of the fleet.
            'api_name': fleet.name,
            # Unknown value, always empty.
            'api_name_id': "",
            # The local fleet ID.
            'api_id': count,
            # List of ships.
            "api_ship": ships + [-1] * (6 - len(ships)),
            # Mission data?
            "api_mission": [0, 0, 0, 0]
        }
        port2['api_data']['api_deck_port'].append(temp_dict)

    # Materials.
    port2['api_data']['api_material'] = [
        {"api_id": n + 1, "api_member_id": admiral.id, "api_value": int(val)}
        for n, val in enumerate(admiral.resources.split(','))
    ]

    # Ships! Yay! (said nobody)
    port2['api_data']['api_ship'] = []

    # Generate the absolute clusterfuck.
    # count = 0
    for num, ship in enumerate(admiral_ships):
        # count += 1
        if not ship.active:
            continue
        assert isinstance(ship, db.AdmiralShip)
        port2['api_data']['api_ship'].append(ShipHelper.generate_api_data(admiral.id, ship.local_ship_num))

    # Generate ndock.
    port2['api_data']['api_ndock'] = DockHelper.generate_dock_data(admiral)['rdock']

    return port2
import json
import re
from datetime import datetime

from elasticsearch import Elasticsearch, helpers
from index_settings import INDEX_MAPPINGS, INDEX_SETTINGS
from util import merge_two_dicts

FILE = "AllCards.json"
ES_HOST = ['127.0.0.1:19200']
CLIENT = Elasticsearch(hosts=ES_HOST, timeout=120)
INDEX_NAME = 'mtg'
TYPE_NAME = "cards"
REPLACE_KEYS = ["power", "toughness"]

idx = merge_two_dicts(INDEX_SETTINGS, INDEX_MAPPINGS)

with open(FILE) as motg_data:
    data = json.load(motg_data)

# CREATE YOUR BULK REQUEST
bulk_data = []
for k, v in data.items():
    v["@timestamp"] = datetime.now()
    for m in REPLACE_KEYS:
        if v.get(m) and (isinstance(v.get(m), str) or "*" in v.get(m)):
            v[m] = re.sub('[^0-9]', '', v[m])
    op_dict = {
        "_op_type": "index",
        "_index": INDEX_NAME,
        "_type": TYPE_NAME,