def generate(self, r_in=""):
    '''Generate a species using the specified race, or randomly chosen if none is given.

    :param r_in: Race index (e.g. "elf"). When empty, a race is picked at
        random from the API's race list.

    Side effects: sets self.name, self.speed and self.size, appends to
    self.languages, and records ability bonuses in self.scores.
    '''
    endpoint = "api/races/"
    if r_in == "":
        # Grab all races JSON
        data = get_data(endpoint)["results"]
        # Collect the race indices (comprehension instead of append loop).
        races = [elt["index"] for elt in data]
        for race in races:
            print("Race: ", race)
        # random.choice is the idiomatic (and equivalent) way to pick one;
        # the old randint(0, len-1) indexing also crashed on an empty list.
        self.name = random.choice(races)
        print("Creating random race: ", self.name)
    else:
        # Select the input race
        self.name = r_in
        print("Creating race: ", self.name)

    # Grab this character's race JSON
    endpoint += self.name + "/"
    data = get_data(endpoint)

    # Set data from JSON
    self.speed = data["speed"]
    self.size = data["size"]
    for elt in data["languages"]:
        self.languages.append(elt["name"])
    for elt in data["ability_bonuses"]:
        self.scores[elt["name"]] = elt["bonus"]
        print("Bonus: ", elt["name"], " = ", elt["bonus"])
def start(): print "Choose which state you want to Get Data for." state = vf.decide_state() state_code = str(state_fips[state]) #get state_code print "Do you want your data to be seasonally adjusted or unseasonally adjusted?" print "Warning: county level data is only available as unseasonally adjusted." seasonal_adj = vf.get_adjustment() level = "ST" print "Choose which type of data you want." print """ Labor Force Employment Unemployment Unemployment Rate """ measure_code = str(vf.get_measurecode()) series_id = "LA" + seasonal_adj + level + state_code + "000" + measure_code start_year = vf.choose_startyear() end_year = vf.choose_endyear() apidata=get_data(series_id, start_year, end_year, state) chart_title = vf.get_charttitle(state,measure_code, start_year, end_year) data_output(apidata, chart_title) #start graphing and tabling! print "Would you like to find another state's unemployment data?" choose_continue = raw_input("Type yes or no > ") if choose_continue[0].lower() == "y": print "Alright lets continue." start() elif choose_continue[0].lower == "n": print "Thank you! Goodbye" exit()
def get_new_data():
    """Refresh the module-level ``df`` and ``balances`` from the API.

    Fetches the active options, drops rows whose expiration is already in
    the past, recomputes projected profit and reloads the pool balances.
    """
    global df, balances
    df = api.get_data("options_active")
    # the status from the subgraph data will only change if
    # unlock and unlockAll API is called. this is currently done manually!
    # to address this I check for it and set samples with active status
    # but expiration in the past (smaller than timestamp utc now) to EXPIRED
    # (i.e. such rows are simply filtered out here; tz_localize(None) makes
    # the "now" timestamp naive so it compares against a naive column).
    df = df[df["expiration"] >= pd.Timestamp.utcnow().tz_localize(None)]
    df = prepare_data.get_projected_profit(df)
    balances = prepare_data.get_pool_balances()
def extract_intersection(args):
    '''
    Process selected intersections listed in a given CSV file.
    :param args: Dictionary with function arguments:
       args['city_name'] = Name of the city. E.g., 'San Francisco, California, USA'.
       args['osm_file'] = Name of the OSM file.
       args['cross_streets'] = List [<street_name_1>, <street_name_2>, ...] pointing to an intersection.
       args['crop_radius'] = Crop radius for intersection extraction. Default = 80.
       args['debug'] = (Optional) Boolean parameter indicating whether DEBUG info must be logged.
    :returns res: Dictionary with resulting info, or None when args is None
        or no intersection matches all the requested cross streets:
       res['intersection'] = Dictionary with intersection data.
    '''
    if args is None:  # `is None`, not `== None`
        return None

    city_name = args['city_name']
    osm_file = args['osm_file']
    cross_streets = args['cross_streets']
    # dict.get covers the optional keys in one step each.
    crop_radius = args.get('crop_radius', 80)
    debug = args.get('debug', False)

    #city_area = api.get_data(file_name=osm_file)
    city_area = api.get_data(city_name=city_name)
    intersecting_streets = api.get_intersecting_streets(city_area)

    # Keep the LAST address containing every requested cross street, which
    # matches the original scan-to-the-end behavior.
    intersection_addr = None
    for ia in intersecting_streets:
        if all(cs in ia for cs in cross_streets):
            intersection_addr = ia

    # BUG FIX: previously a missing match passed None straight into
    # api.get_intersection(); fail explicitly instead.
    if intersection_addr is None:
        return None

    intersection = api.get_intersection(intersection_addr, city_area,
                                        crop_radius=crop_radius)
    return {'intersection': intersection}
def get(self, key='key'):
    """Look up *key* in the cached JSON object and print its value.

    An unknown key that looks like a URL ('http' in it) is fetched through
    the API and cached via ``self.set``; any other unknown key just prints
    "Key Not Found".
    """
    if key not in self.jsonObject:
        if 'http' in key:
            # Unknown URL-like key: fetch it and remember the result.
            print('Key Not Found\nBut adding the requested key-value pair')
            self.set(key, api.get_data(key))
        else:
            print('Key Not Found')
        return
    # Cached hit: print the stored value.
    print(self.jsonObject[key])
def results(location, term=''):
    """Render the results page for the top business matching a search.

    :param location: Location string passed to the business API.
    :param term: Optional search term.
    :returns: Rendered results template, or a redirect when nothing matched.
    """
    business = get_data(location, term)
    if business is None:  # `is None` instead of `== None`
        return redirect('/no-results')

    name = business['name']
    image = business['image_url']
    rating = business['rating']
    address = ' '.join(business['location']['display_address'])
    # Google Maps link: the name plus the last display-address line
    # for disambiguation, URL-encoded.
    maps = ('https://www.google.com/maps/search/?api=1&query=' +
            urllib.parse.quote_plus(
                name + ' ' + business['location']['display_address'][-1]))
    return render_template('results.html', name=name, image=image,
                           rating=rating, address=address, maps=maps)
def execute_job(jid):
    """Worker entry point: tally adoption counts by animal age for job *jid*.

    Marks the job 'in progress', then scans the first ``count`` records and
    accumulates, per "Age upon Outcome" value, how many had an "Adoption"
    outcome (``age_x`` and ``adoptions_y`` are kept as parallel lists).
    """
    update_job_status(jid, 'in progress')
    # Create figure
    count = int(rd.get('count'))
    adoptions_dict = get_data()
    age_x = []        # distinct ages seen among adoptions
    adoptions_y = []  # parallel list of adoption counts
    for i in range(count):
        if adoptions_dict[i]["Outcome Type"] == "Adoption":
            adoption_age = adoptions_dict[i]["Age upon Outcome"]
            if adoption_age not in age_x:
                age_x.append(adoption_age)
                adoptions_y.append(1)
            else:
                # BUG FIX: original incremented the misspelled name
                # `adoption_y`, raising NameError on any repeated age.
                adoptions_y[age_x.index(adoption_age)] += 1
def create_chart():
    """Build the Plotly figure comparing the rebalanced portfolio vs HODL.

    Reads the current rebalance settings from module globals, fetches the
    two value series plus summary stats, and returns a Dash-style figure
    dict. Returns an empty dict until a rebalance type has been chosen.
    """
    global rebalance_type
    global rebalance_period
    global rebalance_threshold
    global portfolio
    # Nothing selected yet — render an empty chart.
    if rebalance_type is None:
        return {}
    hodl, snap, stats = get_data(rebalance_type, rebalance_period,
                                 rebalance_threshold, portfolio)
    return {
        'data': [
            # NOTE(review): go.Line is a deprecated alias of go.Scatter in
            # recent plotly versions — confirm before upgrading plotly.
            go.Line(
                x=snap.index,
                y=snap.values,
                mode='lines',
                name='Snapfund',
                marker={
                    'size': 5,
                    'color': '#774C6C',
                    'opacity': 0.9,
                },
            ),
            go.Line(
                x=hodl.index,
                y=hodl.values,
                mode='lines',
                name='HODL',
                marker={
                    'size': 5,
                    'color': '#7FDBFF',
                    'opacity': 0.9,
                },
            )
        ],
        'layout': {
            'height': 500,
            'margin': {
                'l': 60,
                'b': 30,
                'r': 10,
                't': 10
            },
            # Summary box drawn in paper coordinates near the top center.
            'annotations': [{
                'x': 0.51,
                'y': 0.85,
                'xanchor': 'center',
                'yanchor': 'bottom',
                'xref': 'paper',
                'yref': 'paper',
                'showarrow': False,
                # NOTE(review): 'text-align' is not a standard Plotly
                # annotation property (the documented key is 'align') —
                # verify whether this has any effect.
                'text-align': 'center',
                'bgcolor': 'rgba(255, 255, 255, 0.5)',
                'text': """Snapfund final: {:.2f}% \n HODL final: {:.2f}% \n Rebalanced: {} times """.format(stats['snap'] * 100, stats['hodl'] * 100, stats['n_rebalances'])
            }],
            'yaxis': {
                'color': '#505050'
            },
            'xaxis': {
                'showgrid': False,
                'color': '#505050'
            },
            'paper_bgcolor': '#ffffff',
            'plot_bgcolor': '#ffffff',
        }
    }
resultLowerCase = sentence.lower() # replace enter (\n) with space resultNoEnter = re.sub('[\t\n]', ' ', resultLowerCase) # replace tabs and multiple spaces with single space resultNoTab = re.sub(' +', ' ', resultNoEnter) # change text encoding to utf8 encodedText = resultNoTab # encodedText = resultNoTab.encode("utf-8") # # apply Sastrawi stopper removal # endText = stopword.remove(encodedText) word = encodedText.split() full_words = full_words + word # print words # text ready to be compared with database dataDosen = get_data('dosen') dataJudul = get_data('judul') dataRegexNomor = get_data('nomor') dataRegexIsi = get_data('isi') # sample # dataDosen = ['ahmadi yuli ananta','ariadi retno tri hayati ririd','arief prasetyo','banni satria andoko','budi harijanto','cahya rahmad','deddy kusbianto purwoko aji','dimas wahyu wibowo''dwi puspitasari','dyah ayu irawati','ekojono','ely setyo astuti','erfan rohadi','faisal rahutomo','gunawan budiprasetyo','hendra pradibta','imam fahrur rozi','indra dharma wijaya','luqman affandi', 'nurudin santoso','putra prima arhandi','rawansyah','ridwan rismanto','rosa andrie asmara','siti romlah','ulla defana rosiani','yan watequlis syaifudin'] # dataJudul = ['surat tugas', 'lembar pengesahan'] # check document type document_type_matched_array = [] is_multiple = "" for item in dataJudul: trigger_word_array = item['trigger_word'].split( ', ' ) # get trigger word, split by comma and space to get its array form
import csv

from api import get_data

# Export the customer records returned by the API to data.csv.
data = get_data()['customers']
keys = data[0].keys()

# BUG FIX: per the csv module docs the file must be opened with
# newline='' — otherwise blank rows appear on Windows. 'wt' -> 'w'
# (text mode is the default).
with open("data.csv", 'w', newline='') as file:
    writer = csv.writer(file, delimiter=',', quotechar='"',
                        quoting=csv.QUOTE_MINIMAL)
    writer.writerow(list(keys))  # header row
    for info in data:
        writer.writerow(list(info.values()))
def main(argv):
    """Driver script: batch-process intersections, then (dead code below a
    `return`) run a single-intersection guideway/blind-zone demo.

    The `if False:` / `if True:` guards are manual toggles selecting which
    stage of the script runs.
    """
    print(__doc__)
    maps_dir = "maps"
    city_name = "San Francisco, California, USA"
    data_dir = "intersections"
    input_file = "intersections.csv"
    #input_file = "intersections0.csv"
    ignored_directions = ['u_turn']
    crop_radius = 80
    debug = True
    args = {
        'city_name': city_name,
        'data_dir': data_dir,
        'crop_radius': crop_radius,
        'debug': debug
    }
    #generate_intersection_list(args)
    # Toggle: flip to True to stop after (re)generating the list above.
    if False:
        return
    intersections_file = posixpath.join(maps_dir, input_file)
    # Second assignment deliberately overrides the first (manual selection).
    id_list = [2, 4, 5, 7, 10, 11, 14]
    id_list = [4, 5, 6]
    args = {
        'city_name': city_name,
        'maps_dir': maps_dir,
        'intersections_file': intersections_file,
        'id_list': id_list,
        'ignored_directions': ignored_directions,
        'crop_radius': crop_radius,
        'debug': debug
    }
    res = process_intersections(args)
    # Export KML guideways and traces for each processed intersection.
    for k in res.keys():
        kmlguideways = "{}/guideways_{}.kml".format(data_dir, k)
        args = {
            'kmlfile': kmlguideways,
            'guideways': res[k]['guideways'],
            'crosswalks': res[k]['crosswalks'],
            'debug': debug
        }
        geo.export_guideways_kml(args)
        kmltraces = "{}/traces_{}.kml".format(data_dir, k)
        args = {
            'kmlfile': kmltraces,
            'traces': res[k]['traces'],
            'latlon': True,
            'color': "FF990099",
            'debug': debug
        }
        geo.export_traces_kml(args)
    # NOTE(review): everything below this return is unreachable — a manual
    # toggle kept for the single-intersection demo. Confirm before removing.
    if True:
        return
    city_name = "Berkeley, California, USA"
    osm_file = "../osm/ComponentDr_NorthFirstSt_SJ.osm"
    # The second call overrides the first: the OSM file wins over the city.
    city = api.get_data(city_name=city_name)
    city = api.get_data(file_name=osm_file)
    cross_streets = api.get_intersecting_streets(city)
    sz = len(cross_streets)
    # Successive assignments: the last hard-coded address is the one used.
    x_section_addr = cross_streets[0]
    x_section_addr = ('University Avenue', 'Acton Street')
    x_section_addr = ('North 1st Street', 'Component Drive')
    x_section = api.get_intersection(x_section_addr, city, crop_radius=50.0)
    guideways = api.get_guideways(x_section)
    crosswalks = api.get_crosswalks(x_section)
    #fig = api.get_intersection_image(x_section)
    #fig.savefig("intersection.jpg")
    fig = api.get_guideway_image(guideways, x_section)
    fig.savefig("guideways.jpg")
    #print(x_section_addr)
    #print(guideways)
    #print(crosswalks)
    main_gw = [guideways[0]]
    # Hand-picked indices into `crosswalks`/`guideways` for this demo.
    c_idx = [1, 3]
    g_idx = [1, 6, 9, 11]
    r_idx = [18, 19]
    b_idx = [25, 28]
    my_cw, my_gw, my_rw, my_bw = [], [], [], []
    # crosswalks
    for idx in c_idx:
        my_cw.append(crosswalks[idx])
    # vehicle guideways
    for idx in g_idx:
        my_gw.append(guideways[idx])
    # railroads
    for idx in r_idx:
        my_rw.append(guideways[idx])
    # bicycle routes
    for idx in b_idx:
        my_bw.append(guideways[idx])
    conflict_zones = api.get_conflict_zones(main_gw[0],
                                            my_gw + my_rw + my_bw + my_cw)
    blocking_guideways = my_gw
    point_of_view = (0.1, 0.5)
    blind_zone = api.get_blind_zone(point_of_view, main_gw[0],
                                    conflict_zones[4], blocking_guideways,
                                    guideways)
    fig = api.get_conflict_zone_image(conflict_zones, x_section)
    fig.savefig("conflict_zones.jpg")
    fig = api.get_blind_zone_image(blind_zone, main_gw[0], x_section,
                                   blocks=blocking_guideways)
    fig.savefig("blind_zone.jpg")
    # Export everything to a single KML for inspection in Google Earth.
    kml_file = 'GG.kml'
    my_kml = KML()
    #my_kml.crosswalk_medians(my_cw, width=15)
    #my_kml.guideway_medians(my_gw, width=20)
    #my_kml.guideway_medians(main_gw, color="ffffff00", width=20)
    my_kml.crosswalks(my_cw)
    my_kml.guideways(my_rw, color="ff00BBBB")
    my_kml.guideways(my_bw, color="ff00DD00")
    my_kml.guideways(my_gw, color="ffff0000")
    my_kml.guideways(main_gw, color="ffffff00")
    my_kml.conflict_zones(conflict_zones)
    my_kml.blind_zones([blind_zone])
    my_kml.save(kml_file)
def generate_intersection_list(args):
    ''' Generate the list of intersections for a given city.
    :param args: Dictionary with function arguments:
       args['city_name'] = Name of the city. E.g., 'San Francisco, California, USA'.
       args['data_dir'] = Name of the data directory where the output should be placed.
       args['crop_radius'] = Crop radius for intersection extraction. Default = 80.
       args['debug'] = (Optional) Boolean parameter indicating whether DEBUG info must be logged.
    :returns res: Dictionary with resulting info:
       res['intersections_signalized'] = List of signalized intersections.
       res['intersections_other'] = List of all other intersections.
       res['failed'] = List of intersections, for which data could not be extracted.
    '''
    if args is None:  # `is None`, not `== None`
        return None

    city_name = args['city_name']
    data_dir = args['data_dir']

    # One CSV per category, plus an (optional, currently disabled) pickle.
    output_signalized = "{}/{}_signalized.csv".format(data_dir, city_name)
    output_other = "{}/{}_other.csv".format(data_dir, city_name)
    output_nosignal = "{}/{}_nosignal.csv".format(data_dir, city_name)
    output_failed = "{}/{}_failed.csv".format(data_dir, city_name)
    pickle_res = "{}/{}.pickle".format(data_dir, city_name)

    # dict.get covers the optional keys in one step each.
    crop_radius = args.get('crop_radius', 80)
    debug = args.get('debug', False)

    city = api.get_data(city_name=city_name)
    cross_streets = api.get_intersecting_streets(city)
    #cross_streets = random.sample(cross_streets, 50)

    fp_s = open(output_signalized, 'w')
    fp_n = open(output_nosignal, 'w')
    fp_o = open(output_other, 'w')
    fp_f = open(output_failed, 'w')
    # Header rows are written lazily, on the first row of each category.
    first_s, first_n, first_o, first_f = True, True, True, True

    header = "Intersection,Longitude,Latitude"
    meta_keys = []
    key_count = 0
    res = {
        'intersections_signalized': [],
        'intersections_nosignal': [],
        'intersections_other': [],
        'failed': []
    }
    idx = 1
    cnt_s, cnt_n, cnt_o, cnt_f = 0, 0, 0, 0
    prct = 0
    sz = len(cross_streets)

    for cs in cross_streets:
        try:
            intersection = api.get_intersection(cs, city,
                                                crop_radius=crop_radius)
            lon, lat = intersection['center_x'], intersection['center_y']
            meta = intersection['meta_data']

            signalized, other = False, False
            if meta['signal_present'] == "yes":
                signalized = True
            if meta['signal_present'] is None:  # unknown signal state
                other = True

            # Build the CSV header once, from the first successfully
            # extracted intersection's metadata keys.
            if len(meta_keys) == 0:
                for k in meta.keys():
                    if k != "timestamp":
                        if k == 'approach_counts':
                            header += ",oneway_approach_count,twoway_approach_count,singleway_approach_count"
                        elif k == 'exit_counts':
                            header += ",oneway_exit_count,twoway_exit_count,singleway_exit_count"
                        else:
                            header += ",{}".format(k)
                        meta_keys.append(k)
                        key_count += 1
                header += "\n"

            # One CSV row for this intersection.
            buf = "\"{}\",{},{}".format(cs, lon, lat)
            for k in range(key_count):
                key = meta_keys[k]
                if key in ('approach_counts', 'exit_counts'):
                    buf += ",{},{},{}".format(meta[key]['oneway'],
                                              meta[key]['twoway'],
                                              meta[key]['singleway'])
                elif key in ('approach_street_types', 'exit_street_types'):
                    buf += ",\"{}\"".format(meta[key])
                elif key in ('approach_max_speed_limit',
                             'approach_min_speed_limit',
                             'exit_max_speed_limit',
                             'exit_min_speed_limit'):
                    # Keep only the numeric part, dropping the unit suffix.
                    val_str = meta[key].split()
                    buf += ",{}".format(val_str[0])
                else:
                    buf += ",{}".format(meta[key])
            buf += "\n"

            if signalized:
                res['intersections_signalized'].append(intersection)
                if first_s:
                    fp_s.write(header)
                    first_s = False
                fp_s.write(buf)
                cnt_s += 1
            elif other:
                res['intersections_other'].append(intersection)
                if first_o:
                    fp_o.write(header)
                    first_o = False
                fp_o.write(buf)
                cnt_o += 1
            else:
                res['intersections_nosignal'].append(intersection)
                if first_n:
                    fp_n.write(header)
                    first_n = False
                fp_n.write(buf)
                cnt_n += 1
        except Exception:
            # BUG FIX: was a bare `except:`, which also swallowed
            # KeyboardInterrupt/SystemExit and made the loop unstoppable.
            res['failed'].append(cs)
            if first_f:
                fp_f.write("Intersection\n")
                first_f = False
            fp_f.write("\"{}\"\n".format(cs))
            cnt_f += 1

        # Progress report, logged at most once per whole percent.
        new_prct = 100 * idx / sz
        print(cs, cnt_s, cnt_n, cnt_o, cnt_f, idx, sz, new_prct, prct)
        if new_prct - prct >= 1:
            prct = new_prct
            if debug:
                logging.debug(
                    "process_intersections.generate_intersection_list(): Generated {}% ({} signalized, {} without signal, {} other, {} failed out of {})."
                    .format(int(prct), cnt_s, cnt_n, cnt_o, cnt_f, sz))
        idx += 1

    fp_s.close()
    fp_n.close()
    fp_o.close()
    fp_f.close()

    if False:  # manual toggle: pickling of the result is currently disabled
        f = open(pickle_res, 'wb')
        pickle.dump(res, f)
        f.close()

    return res
import sys
from api import get_data, get_intersecting_streets, get_intersection

# Ad-hoc smoke test for the intersection API.
if __name__ == "__main__":
    city_name = 'Campbell, California, USA'
    street_tuple = ('Abbey Lane', 'Bucknall Road')
    print(" ".join(street_tuple))
    # NOTE(review): this exit makes everything below unreachable —
    # presumably a debugging leftover; confirm before removing.
    sys.exit(0)
    city_data = get_data(city_name=city_name)
    cross_streets = get_intersecting_streets(city_data)
    # Print the first few intersecting street tuples.
    i = 0
    for s in cross_streets:
        i += 1
        print(i, s)
        if i > 3:
            break
    # Fetch one intersection and dump its metadata.
    x = get_intersection(street_tuple, city_data)
    print(x.keys())
    for m in x['meta_data']:
        print(m, x['meta_data'][m])
import sys
import prog_runner as pr
import os
import api


def parse(x):
    """Split an instruction like 'F10' into its action letter and integer value: ('F', 10)."""
    return x[0], int(x[1:])


def parse_input(inp):
    """Parse newline-separated instructions into a list of (action, value) tuples."""
    return [parse(x.strip()) for x in inp.strip().split("\n")]


# Puzzle input for Advent of Code 2020 day 12.
inp = parse_input(api.get_data(day=12))

# The day-12 worked sample from the puzzle statement.
sample = parse_input("""F10
N3
F7
R90
F11
""")

"""
# For writing the code below:
(1, 0) N
(0,0) E
(0, 1) S
(-1, 0)
def test_correct_get_returns_response_object(self):
    """ A correct get request should return Response object """
    # An in-range id ("3") must yield a requests Response instance.
    self.assertIsInstance(get_data(self.url, "3"), requests.models.Response)
def main(argv):
    """Single-intersection demo: extract guideways, crosswalks, conflict
    zones and a blind zone for a hard-coded intersection, then export
    images, pickle/JSON/YAML dumps, and KML files.
    """
    print(__doc__)
    city_name = "Berkeley, California, USA"
    osm_file = "maps/ComponentDr_NorthFirstSt_SJ.osm"
    # The second call overrides the first: the OSM file wins over the city.
    city = api.get_data(city_name=city_name)
    city = api.get_data(file_name=osm_file)
    cross_streets = api.get_intersecting_streets(city)
    sz = len(cross_streets)
    # Successive assignments: the last hard-coded address is the one used.
    x_section_addr = cross_streets[0]
    x_section_addr = ('University Avenue', 'Acton Street')
    x_section_addr = ('North 1st Street', 'Component Drive')
    x_section = api.get_intersection(x_section_addr, city, crop_radius=50.0)
    guideways = api.get_guideways(x_section)
    crosswalks = api.get_crosswalks(x_section)
    #fig = api.get_intersection_image(x_section)
    #fig.savefig("intersection.jpg")
    fig = api.get_guideway_image(guideways, x_section)
    fig.savefig("guideways.jpg")
    #print(x_section_addr)
    #print(guideways)
    #print(crosswalks)
    # First KML: all guideway medians by type.
    my_gw, my_rw, my_bw = [], [], []
    for g in guideways:
        if g['type'] == 'drive':
            my_gw.append(g)
        if g['type'] == 'railway':
            my_rw.append(g)
        if g['type'] == 'bicycle':
            my_bw.append(g)
    kml_file = 'GG0.kml'
    my_kml = KML()
    my_kml.crosswalk_medians(crosswalks, width=3)
    my_kml.guideway_medians(my_rw, color="ff00BBBB", width=3)
    my_kml.guideway_medians(my_bw, color="ff00DD00", width=2)
    my_kml.guideway_medians(my_gw, width=3)
    my_kml.save(kml_file)
    # Hand-picked ids for the conflict/blind-zone analysis below.
    main_idx = [1101]
    c_idx = [35, 36, 37]
    g_idx = [1301, 1007, 1714, 1202]
    r_idx = [2321, 2422]
    b_idx = [3225, 3229]
    main_gw, my_cw, my_gw, my_rw, my_bw = [], [], [], [], []
    bz_gw_id = 0
    # crosswalks
    for cw in crosswalks:
        if cw['id'] in c_idx:
            my_cw.append(cw)
    # Bucket the selected guideways by type; the main guideway is kept apart.
    for gw in guideways:
        my_id = "{}-{}".format(gw['origin_lane']['path_id'],
                               gw['destination_lane']['path_id'])
        if gw['id'] in main_idx:
            main_gw.append(gw)
            continue
        if gw['type'] == 'drive' and gw['id'] in g_idx:
            my_gw.append(gw)
            continue
        if gw['type'] == 'railway' and gw['id'] in r_idx:
            my_rw.append(gw)
            continue
        if gw['type'] == 'bicycle' and gw['id'] in b_idx:
            my_bw.append(gw)
            continue
    conflict_zones = api.get_conflict_zones(main_gw[0],
                                            my_gw + my_rw + my_bw + my_cw)
    # Default to zone 6, but prefer the zone whose second guideway matches
    # bz_gw_id if one exists.
    my_cz = conflict_zones[6]
    for cz in conflict_zones:
        if cz['guideway2_id'] == bz_gw_id:
            my_cz = cz
            break
    blocking_guideways = my_gw
    point_of_view = (0.1, 0.5)
    blind_zone = api.get_blind_zone(point_of_view, main_gw[0], my_cz,
                                    blocking_guideways, guideways)
    # Bundle everything for serialization.
    data = {
        'main_gw': main_gw[0],
        'vehicle_gw': my_gw,
        'bicycle_gw': my_bw,
        'rail_gw': my_rw,
        'crosswalks': crosswalks,
        'conflict_zones': conflict_zones,
        'blind_zones': [blind_zone]
    }
    fname = "intersection_data"
    fig = api.get_conflict_zone_image(conflict_zones, x_section)
    fig.savefig("conflict_zones.jpg")
    # Dump in three formats (the explicit close() calls inside the `with`
    # blocks are redundant but harmless).
    with open(fname + ".pickle", 'wb') as fp:
        pickle.dump(data, fp)
        fp.close()
    with open(fname + ".json", 'w') as fp:
        json.dump(data, fp, cls=SetEncoder)
        fp.close()
    with open(fname + ".yaml", 'w') as fp:
        yaml.dump(data, fp)
        fp.close()
    #fig = api.get_blind_zone_image(blind_zone, main_gw[0], x_section, blocks=blocking_guideways)
    #fig.savefig("blind_zone.jpg")
    # Second KML: the selected subset plus conflict and blind zones.
    kml_file = 'GG.kml'
    my_kml = KML()
    my_kml.crosswalks(my_cw)
    my_kml.guideways(my_rw, color="ff00BBBB")
    my_kml.guideways(my_bw, color="ff00DD00")
    my_kml.guideways(my_gw, color="ffff0000")
    my_kml.guideways(main_gw, color="ffffff00")
    my_kml.conflict_zones(conflict_zones)
    my_kml.blind_zones([blind_zone])
    my_kml.save(kml_file)
# See the License for the specific language governing permissions and # limitations under the License. """https://adventofcode.com/2020/day/18""" import sys import math import copy import collections import itertools import sys import os from functools import lru_cache import prog_runner as pr import api inp = api.get_data(day=18) # "1 + 2 * 3 + 4 * 5 + 6" # inp = inp.strip().split("\n") def do_parse(row): """Parses "((2 + 4 * 9) * (6 + 9 * 8 + 6) + 6) + 2 + 4 * 2". into [[[2, '+', 4, '*', 9], '*', [6, '+', 9, '*', 8, '+', 6], '+', 6], '+', 2, '+', 4, '*', 2] """ tokens = row.replace('(', ' ( ').replace(')', ' ) ').split()
def test_get_raises_value_error_for_non_int(self):
    """ should raise Value Error if input is not integer """
    # Non-numeric id strings must be rejected with ValueError.
    with self.assertRaises(ValueError):
        get_data(self.url, "s")
# distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """https://adventofcode.com/2020/day/13""" import sys import math import copy import collections import sys import prog_runner as pr import os import api inp = api.get_data(day=13) # 17,x,13,19: # t % 17 = 0 # t % 13 = 2 # t % 19 = 3 # ----------- def mod_inv(x, mod): def egcd(a, b): """ a*A + b*B = gcd(a, b), returns (A, B) """ assert a > 0 and b >= 0 if b == 0:
def test_get_returns_nothing_to_show_out_of_range(self):
    """ should return '* empty *' if input is not within 1 and 100 """
    # Ids above 100 are out of range and yield the placeholder string.
    self.assertEqual("* empty *", get_data(self.url, "101"))
def get_data(visualization_id):
    """Endpoint: return the data for one visualization as a JSON response.

    Delegates to the backing api module (which also receives the current
    request) and wraps the serialized payload in a response object.
    """
    payload = api.get_data(visualization_id, request)
    body = json.dumps(payload)
    return make_response(body)