def instances_fee():
    try:
        from gcp import fee
        import ast
        auth = Auth()
        auth.get_service(request)
        data = request.args.to_dict()
        # Parse the 'ebs' parameter with ast.literal_eval rather than eval: it comes
        # straight from the request and must never be evaluated as arbitrary code.
        ebs = list(ast.literal_eval(data['ebs']))
        instance_type = data['instance_type']
        os = data['os']
        quantity = int(data['quantity'])
        total_compute = round(
            fee.instance_price[instance_type]['price'][
                Region().get_region_name(auth.region)] * quantity, 2)
        total_ebs = 0
        for each_ebs in ebs:
            total_ebs += round(
                fee.disk_price[each_ebs['type']][
                    Region().get_region_name(auth.region)] * int(each_ebs['size']), 2)
        total = total_compute + total_ebs
        res = {'compute': total_compute, 'ebs': total_ebs, 'total': total}
        return jsonify(res)
    except errors.HttpError as e:
        msg = json.loads(e.content)
        return jsonify(msg=msg['error']['message']), msg['error']['code']
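# Hypothetical query parameters for instances_fee(); the field names come from the
# handler above, but the instance type and disk type values are illustrative only.
params = {
    'instance_type': 'n1-standard-4',   # key into fee.instance_price (example value)
    'os': 'linux',                      # read by the handler but not used in the price math
    'quantity': '2',                    # converted to int
    # 'ebs' is a literal list of disk dicts, parsed with ast.literal_eval:
    'ebs': "[{'type': 'pd-ssd', 'size': 100}, {'type': 'pd-standard', 'size': 500}]",
}
# Expected response shape: {'compute': <float>, 'ebs': <float>, 'total': <float>}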
def main(args, session):
    logging.info('Deleting existing regions')
    session.query(Region).delete()

    logging.info('Loading species')
    clements_to_ioc = {
        species.scientific_name_clements: species.scientific_name
        for species in session.query(Species)
        if species.scientific_name_clements
    }

    logging.info('Processing regions')
    regions = []
    warned_scientific_names = set()
    with open(args.ebd_regions_file, 'rt') as input_file:
        # Hardcoding the CSV length here is awful, but it's only used for progress reporting.
        for row in progress.percent(csv.DictReader(input_file), 14835):
            region_id = int(row['region_id'])
            centroid_lat = float(row['centroid_lat'])
            centroid_lon = float(row['centroid_lon'])
            observations_by_scientific_name = json.loads(
                row['observations_by_scientific_name'])
            species_weight_by_scientific_name = {}
            for scientific_name_clements, num_observations in \
                    observations_by_scientific_name.items():
                scientific_name = clements_to_ioc.get(scientific_name_clements)
                if not scientific_name:
                    if (scientific_name_clements not in warned_scientific_names
                            and '/' not in scientific_name_clements  # Uncertainties.
                            and 'sp.' not in scientific_name_clements.split(' ')  # Only genus, not species.
                            and 'x' not in scientific_name_clements.split(' ')  # Hybrids.
                            and 'undescribed' not in scientific_name_clements  # Undescribed forms.
                            ):
                        # This happens a fair bit; in the "IOC vs other lists" spreadsheet
                        # these rows are typically reddish brown, indicating
                        # "species not recognized by IOC".
                        logging.warning(
                            f'Scientific name {scientific_name_clements} not found '
                            '(probably recognized by Clements but not IOC)')
                        warned_scientific_names.add(scientific_name_clements)
                    continue
                species_weight_by_scientific_name[scientific_name] = num_observations
            regions.append(
                Region(region_id=region_id,
                       lat_start=centroid_lat - _SIZE_LAT / 2,
                       lat_end=centroid_lat + _SIZE_LAT / 2,
                       lon_start=centroid_lon - _SIZE_LON / 2,
                       lon_end=centroid_lon + _SIZE_LON / 2,
                       centroid_lat=centroid_lat,
                       centroid_lon=centroid_lon,
                       species_weight_by_scientific_name=species_weight_by_scientific_name))
    session.bulk_save_objects(regions)
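# A minimal, hypothetical driver for main() above; the SQLite URL, the module that
# defines Region/Species, and the final commit are assumptions, not part of this file.
import argparse
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

parser = argparse.ArgumentParser()
parser.add_argument('--ebd_regions_file', required=True)
args = parser.parse_args()

engine = create_engine('sqlite:///regions.db')  # placeholder database URL
session = sessionmaker(bind=engine)()
main(args, session)
session.commit()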
def init_step(self, img, col="hsv", k=50, minsize=50, avec=[1, 1, 1, 1]):  # minsize=20
    self.regions = []
    self.similarities = []
    self.bboxes = []

    if col == "hsv":
        img = color.rgb2hsv(img)  # convert to hsv
    elif col == "lab":
        img = color.rgb2lab(img)  # convert to lab
    # default: RGB
    else:
        img = img.astype(float)  # to prevent overflow

    self.im_size = float(img.shape[0] * img.shape[1])

    # create segmentation
    self.segment = segmentation.felzenszwalb(img, k, 0.8, minsize)
    self.num_classes = np.amax(self.segment) + 1  # number of classes
    # print("Starting with " + str(self.num_classes) + " classes!")
    self.num_regions = self.num_classes

    # calc gradient maps
    gradrx = si.filters.gaussian_filter(img[:, :, 0], 1.0, order=(0, 1), mode="nearest")
    gradgx = si.filters.gaussian_filter(img[:, :, 1], 1.0, order=(0, 1), mode="nearest")
    gradbx = si.filters.gaussian_filter(img[:, :, 2], 1.0, order=(0, 1), mode="nearest")
    gradry = si.filters.gaussian_filter(img[:, :, 0], 1.0, order=(1, 0), mode="nearest")
    gradgy = si.filters.gaussian_filter(img[:, :, 1], 1.0, order=(1, 0), mode="nearest")
    gradby = si.filters.gaussian_filter(img[:, :, 2], 1.0, order=(1, 0), mode="nearest")
    gradmap_r = np.sqrt(gradrx**2 + gradry**2)
    gradmap_g = np.sqrt(gradgx**2 + gradgy**2)
    gradmap_b = np.sqrt(gradbx**2 + gradby**2)
    # use np.arctan2 instead of arctan to handle 0 gradients
    anglemap_r = np.arctan2(gradry, gradrx) * 180 / np.pi
    anglemap_g = np.arctan2(gradgy, gradgx) * 180 / np.pi
    anglemap_b = np.arctan2(gradby, gradbx) * 180 / np.pi

    # create regions arrays
    self.regions = []
    for i in range(self.num_classes):
        r = Region(i)
        r.evaluate(img, self.segment, gradmap_r, gradmap_g, gradmap_b,
                   anglemap_r, anglemap_g, anglemap_b)
        self.regions.append(r)
        self.bboxes.append(r.bbox)

    # calc region neighbours
    for r in self.regions:
        self.find_neighbours(r)

    # calculate similarities s(ri, rj)
    for i, r in enumerate(self.regions):
        for n in r.neighbours:
            temp = [i, n, self.similarity(r, self.regions[n])]
            temp2 = [n, i, temp[2]]
            # check if already calculated
            if temp2 not in self.similarities:
                self.similarities.append(temp)
    self.similarities = sorted(self.similarities, key=itemgetter(2), reverse=True)
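# Hypothetical usage of init_step(); the class name SelectiveSearch and its
# no-argument constructor are assumptions for the sake of the example.
from skimage import data

ss = SelectiveSearch()
img = data.astronaut()                       # any RGB image works here
ss.init_step(img, col="hsv", k=50, minsize=50)
print(ss.num_regions, "initial regions")
if ss.similarities:
    print("most similar neighbouring pair:", ss.similarities[0])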
def make_some_data(self):
    img_path = "data_sim/boulder.png"
    region_coordinates = {'latmin': 0, 'latmax': 0, 'lonmin': 0, 'lonmax': 0}
    Boulder = Region('Boulder', img_path, region_coordinates)
    Boulder.initPointTargets()
    Boulder.generateLayers()
    total_targets = np.zeros((100, 100, 100))
    # Accumulate the ground-truth, shake-base and pixel-bleed layers for every target.
    for gt_layer, sb_layer, pb_layer in zip(Boulder.ground_truth_layers,
                                            Boulder.shake_base_layers,
                                            Boulder.pixel_bleed_layers):
        total_targets = total_targets + gt_layer + sb_layer + pb_layer
    # Add the noise layers on top of the accumulated targets.
    total = (total_targets + Boulder.noise_layer +
             Boulder.structured_noise_layer + Boulder.shotgun_noise_layer)
    return total
def init_sections_state(self):
    net_manager.init_devices()
    for section in self.vbox.get_children():
        if section.show_or_hide():
            section.section_show()
            section.init_state()
        else:
            section.section_hide()

    slider._append_page(Region(), "region")

    from setting_page_ui import SettingUI
    self.setting_page_ui = SettingUI(None, None)
    slider._append_page(self.setting_page_ui, "setting")
def copy_regions(regions, bin_size, seg):
    from regions import Region
    rlist = []
    for r in regions:
        if r.rid in seg.id_to_region:
            rc = seg.id_to_region[r.rid]
        else:
            # Recursively copy child regions, scaling the maximum point by the bin size.
            cc = copy_regions(r.children(), bin_size, seg)
            max_point = tuple([b * i for b, i in zip(bin_size, r.max_point)])
            rc = Region(seg, r.rid, max_point, cc)
        rlist.append(rc)
    return rlist
def merge(self, r1, r2):
    newr = Region(self.num_regions)
    # propagate information from the merged regions
    newr.num_pix = r1.num_pix + r2.num_pix
    newr.colordesc = (r1.colordesc * r1.num_pix + r2.colordesc * r2.num_pix) / newr.num_pix
    newr.texturedesc = (r1.texturedesc * r1.num_pix + r2.texturedesc * r2.num_pix) / newr.num_pix
    # bounding box: element-wise union of the two boxes
    tl = np.minimum(r1.bbox[0], r2.bbox[0])
    tr = np.minimum(r1.bbox[1], r2.bbox[1])
    bl = np.maximum(r1.bbox[2], r2.bbox[2])
    br = np.maximum(r1.bbox[3], r2.bbox[3])
    newr.bbox = np.array([tl, tr, bl, br])
    # update neighbours: the merged region inherits the union of both neighbour sets
    newr.neighbours = r1.neighbours.union(r2.neighbours)
    # now recalc neighbour sets of the surrounding regions
    for n in newr.neighbours:
        self.regions[n].neighbours.discard(r1.id)
        self.regions[n].neighbours.discard(r2.id)
        self.regions[n].neighbours.add(newr.id)
    # remove the old regions from the new neighbour set
    newr.neighbours.discard(r1.id)
    newr.neighbours.discard(r2.id)
    return newr
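# Hypothetical outer loop around merge(): a single greedy step of the region-merging
# algorithm. The method name step() and the id bookkeeping are assumptions; only
# merge(), similarity(), regions, bboxes, similarities and num_regions come from the
# code above.
def step(self):
    # Take the most similar neighbouring pair (list is sorted by descending similarity).
    i, j, _ = self.similarities.pop(0)
    newr = self.merge(self.regions[i], self.regions[j])
    self.regions.append(newr)   # newr.id == len(self.regions) - 1 if ids are sequential
    self.num_regions += 1
    self.bboxes.append(newr.bbox)
    # Drop stale similarities that involve the two merged regions, then add fresh ones.
    self.similarities = [s for s in self.similarities
                         if i not in s[:2] and j not in s[:2]]
    for n in newr.neighbours:
        self.similarities.append([newr.id, n, self.similarity(newr, self.regions[n])])
    self.similarities.sort(key=itemgetter(2), reverse=True)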
def create_regions(s, rids, rcolors, refpts, slevels, pids, task):
    if task:
        task.updateStatus('Making ID table')
    id_to_index = dict([(id, i) for i, id in enumerate(rids)])

    if task:
        task.updateStatus('Collecting child region IDs')
    id_to_child_ids = {}
    n = len(rids)
    for i in range(n):
        pid = pids[i]
        if pid > 0:
            if pid in id_to_child_ids:
                id_to_child_ids[pid].append(rids[i])
            else:
                id_to_child_ids[pid] = [rids[i]]

    if task:
        task.updateStatus('Ordering IDs')
    from regions import Region
    ids = depth_order(rids, id_to_child_ids, set())
    rlist = []
    for c, rid in enumerate(ids):
        if rid in id_to_child_ids:
            children = [s.id_to_region[cid] for cid in id_to_child_ids[rid]]
        else:
            children = []
        i = id_to_index[rid]
        r = Region(s, rid, refpts[i], children)
        # TODO: Get wrappy error setting surface piece color to numpy array.
        r.color = tuple(rcolors[i])
        if slevels is not None:
            r.smoothing_level = slevels[i]
        rlist.append(r)
        if task and c % 1000 == 0:
            task.updateStatus('Created %d of %d regions' % (c, n))

    if slevels is not None:
        s.smoothing_level = max(slevels)
    return rlist
def initialise_regions(self) -> None:
    """Create all the region data classes."""
    with open("data/country_names.csv") as f:
        data = f.readlines()
    for line in data[1:]:
        name, continent = DataSet.parse_csv_line(line)
        region = Region(
            name=name,
            continent=continent
        )
        self.regions[name] = region

    # To ensure consistency between different data sets,
    # some names will be changed.
    uk = self.regions["United Kingdom of Great Britain & Northern Ireland"]
    del self.regions["United Kingdom of Great Britain & Northern Ireland"]
    self.regions["United Kingdom"] = uk

    us = self.regions["United States of America"]
    del self.regions["United States of America"]
    self.regions["United States"] = us
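# Hypothetical usage; the DataSet class name comes from the parse_csv_line call above,
# but its no-argument constructor, and Region exposing its constructor arguments as
# attributes, are assumptions.
ds = DataSet()
ds.initialise_regions()
print(len(ds.regions), "regions loaded")
print(ds.regions["United Kingdom"].continent)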
def run(options):
    try:
        conn = teslajson.Connection(options.user, options.passwd)
    except Exception as e:
        fatalError(f"Failed to connect: {e}")
    logging.info(f"Connection: {conn}")
    logging.info(f"Number of vehicles: {len(conn.vehicles)}")
    if options.verbose > 1:
        n = 1
        for v in conn.vehicles:
            print(f"Vehicle #{n}:", end='')
            json.dump(v, sys.stdout, indent=4, sort_keys=True)
            print("")
            n += 1

    carVINs = opts.confs['cars'].keys()
    if opts.VIN:
        carVINs = [opts.VIN]
    if not carVINs:
        fatalError("Must provide the VIN(s) of one or more car(s) to be tracked")
    logging.debug(f"cars: {carVINs}")

    teslaVINs = [v['vin'] for v in conn.vehicles]
    vinList = [v for v in teslaVINs if v in carVINs]
    if not vinList:
        fatalError("Unable to find requested cars in Tesla API")

    notFound = list(set(carVINs) - set(vinList))
    if notFound:
        fatalError(f"Cars asked for, but not found in Tesla API: {notFound}")

    logging.debug(f"Watching: {vinList}")
    notAskedFor = list(set(teslaVINs) - set(vinList))
    if notAskedFor:
        logging.warning(f"Cars Tesla API knows about, but not asked for: {notAskedFor}")

    vehicles = {v['vin']: v for v in conn.vehicles if v['vin'] in vinList}
    if options.verbose > 3:
        print("VEHICLES:")
        json.dump(vehicles, sys.stdout, indent=4, sort_keys=True)
        print("")

    if opts.schemaFile:
        schemaFile = opts.schemaFile
    else:
        schemaFile = opts.confs.get('schema')
    if not os.path.isfile(schemaFile):
        fatalError(f"Invalid DB schema file: {schemaFile}")
    with open(schemaFile, "r") as f:
        schema = yaml.load(f, Loader=yaml.Loader)

    if opts.dbDir:
        dbDir = opts.dbDir
    else:
        dbDir = opts.confs.get('dbDir')
    if dbDir:
        if not os.path.isdir(dbDir):
            fatalError(f"Invalid DB directory path: {dbDir}")
    else:
        if opts.verbose:
            logging.warning("Not logging data to DB")

    cars = {}
    cmdQs = {}
    respQs = {}
    trackers = {}
    for vin in vinList:
        conf = opts.confs['cars'][vin]
        cars[vin] = car = Car(vin, conf, vehicles[vin])
        logging.info(f"Waking up {vin}: {car.getName()}")
        if not car.wakeUp():
            logging.warning(f"Unable to wake up '{car.getName()}', skipping...")
            time.sleep(random.randint(5, 15))
            continue

        # give car time to wake up and dither start times across cars  #### FIXME
        time.sleep(random.randint(15, 45))

        cdb = None
        if dbDir:
            dbFile = os.path.join(dbDir, vin + ".db")
            cdb = teslaDB.CarDB(vin, dbFile, schema)
        tables = schema['tables'].keys()
        settings = dict(DEF_SETTINGS)
        dictMerge(settings, opts.confs.get('config', {}).get('settings', {}))
        regions = [Region(r) for r in conf.get('regions', [])]
        notifier = Notifier(opts.confs.get('config', {}).get('eventNotifiers', {}))
        cmdQs[vin] = mp.Queue()
        respQs[vin] = mp.Queue()
        tracker = Tracker(car, cdb, tables, settings, regions, notifier,
                          cmdQs[vin], respQs[vin])
        logging.info(f"Tracker: {vin}")
        trackers[vin] = mp.Process(target=tracker.run, args=())

    for vin in trackers:
        trackers[vin].start()

    if options.interactive:
        commandInterpreter(trackers, cmdQs, respQs)

    for vin in trackers:
        trackers[vin].join()
        logging.debug(f"Results for {vin}: {dumpQueue(respQs[vin])}")
def on_landing(self, landing_obj):
    # get the distance between each celestial (in millions of miles)
    distance = distance_between(self.xy[0], self.linkRegion.xy[0],
                                self.xy[1], self.linkRegion.xy[1])
    # divide that distance by the slingshot travel rate
    travel_time = distance / 25
    return Payload(self.get_LID(), ['Hello'],
                   isTaskMaker=True,
                   taskDuration=travel_time,
                   onCompleteFunc=landing_obj.change_region,
                   onCompleteArgs=self.linkRegion)


if __name__ == "__main__":
    Region((0, 0))
    Region((1, 0))
    Primus = Planet('Primus', (0, 0))

    Evan = Player(155782008826494976, 'Evan')
    James = Player(155783768307793920, 'James')
    Eriq = Player(155560259065348097, 'Eriq')
    Emily = Player(612827918984413256, 'Em-Head')
    Storm = Player(155794075432255489, 'Storm')

    y = Halcyon('Evan', (0, 0))
    x = Halcyon('James', (0, 0))
    z = Halcyon('Eriq', (0, 0))
    a = Halcyon('Emily', (0, 0))

    Automaton('Evan', celestial='Primus', territory='North')
    BuildingPlan('Evan', 'Automaton Cradle', tags=['Metal', 'Base', 'SPAWNER|Automaton'],