def get_locations():
    """Return the configured cluster locations as a list.

    The Azure cluster configuration takes precedence over the GS cluster
    configuration; an empty list is returned when neither section declares
    a location.
    """
    candidates = (
        ("azure_cluster", "azure_location"),
        ("gs_cluster", "gs_location"),
    )
    for cluster_key, location_key in candidates:
        if cluster_key in config and location_key in config[cluster_key]:
            return utils.tolist(config[cluster_key][location_key])
    return []
def delete_storage(docreate=True):
    """Delete every storage group listed under the Azure cluster config,
    then remove the generated ``gs_cluster_file.yaml``.

    # Arguments
        docreate: flag forwarded unchanged to delete_storage_group.
    """
    locations = get_locations()
    storages = utils.tolist(config["azure_cluster"]["storages"])
    for grp in storages:
        configGrp = config["azure_cluster"][grp]
        delete_storage_group(locations, configGrp, docreate)
    # Fix: os.remove raises FileNotFoundError when the file was never
    # generated or was already removed; deletion should be idempotent.
    try:
        os.remove("gs_cluster_file.yaml")
    except FileNotFoundError:
        pass
def create_storage(docreate=True):
    """Create every storage group listed under the Azure cluster config,
    then persist the configuration.

    # Arguments
        docreate: flag forwarded unchanged to create_storage_group.
    """
    group_names = utils.tolist(config["azure_cluster"]["storages"])
    region_list = get_locations()
    for group_name in group_names:
        group_config = config["azure_cluster"][group_name]
        create_storage_group(region_list, group_config, docreate)
    save_config()
def photometric_reconstuction_loss(target, sources, intrinsics, intrinsics_inv,
                                   depth, exp_mask, pose,
                                   rotation_mode='euler', padding_mode='zeros'):
    """Multi-scale photometric reconstruction loss for unsupervised SfM.

    Each source frame is warped into the target view using the predicted
    depth and relative pose; the loss is the mean absolute photometric
    difference on valid (in-frame) pixels, optionally weighted by an
    explainability mask, summed over all depth scales.

    # Arguments
        target: target frame tensor, (B, C, H, W).
        sources: list of source frame tensors, each (B, C, H, W).
        intrinsics: camera intrinsics, assumed (B, 3, 3) — the first two
            rows are rescaled per pyramid level.
        intrinsics_inv: inverse camera intrinsics, assumed (B, 3, 3).
        depth: predicted depth — a single tensor or a list of tensors
            (one per scale), each (B, 1, h, w).
        exp_mask: explainability mask(s) matching depth's scales, or None.
        pose: predicted poses, (B, len(sources), pose_dim).
        rotation_mode: rotation parametrization, forwarded to inverse_warp.
        padding_mode: grid-sample padding mode, forwarded to inverse_warp.

    # Returns
        loss: scalar tensor — sum of per-scale reconstruction losses.
    """
    def one_scale(depth, exp_mask):
        assert(exp_mask is None or depth.size()[2:] == exp_mask.size()[2:])
        assert(pose.size(1) == len(sources))
        reconstruction_loss = 0
        b, _, h, w = depth.size()
        downscale = target.size(2) / h
        # Downsample the images and rescale the intrinsics to this level.
        target_scaled = F.adaptive_avg_pool2d(target, (h, w))
        sources_scaled = [F.adaptive_avg_pool2d(s, (h, w)) for s in sources]
        intrinsics_scaled = torch.cat(
            (intrinsics[:, 0:2] / downscale, intrinsics[:, 2:]), dim=1)
        intrinsics_scaled_inv = torch.cat(
            (intrinsics_inv[:, :, 0:2] * downscale, intrinsics_inv[:, :, 2:]),
            dim=2)
        for idx, source in enumerate(sources_scaled):
            # BUG FIX: was `pose[:, i]` — `i` existed only as the list
            # comprehension variable above, which does not leak its scope
            # in Python 3, so this raised NameError (or silently read a
            # stale global). The per-source index here is `idx`.
            current_pose = pose[:, idx]
            source_warped = inverse_warp(source, depth[:, 0], current_pose,
                                         intrinsics_scaled,
                                         intrinsics_scaled_inv,
                                         rotation_mode, padding_mode)
            # Exactly-zero warped pixels mark out-of-frame samples.
            out_of_bound = 1 - (source_warped == 0).prod(
                1, keepdim=True).type_as(source_warped)
            diff = (target_scaled - source_warped) * out_of_bound
            if exp_mask is not None:
                diff = diff * exp_mask[:, idx: idx+1].expand_as(diff)
            reconstruction_loss += diff.abs().mean()
            # NaN guard (x == x is False for NaN). NOTE(review): `.data[0]`
            # is old-PyTorch style; newer versions need `.item()` — confirm
            # the torch version this project targets.
            assert((reconstruction_loss == reconstruction_loss).data[0] == 1)
        return reconstruction_loss

    # tolist: project helper — presumably wraps non-list inputs into
    # single-element lists so the multi-scale loop below is uniform.
    exp_mask, depth = tolist(exp_mask, depth)
    loss = 0
    for d, mask in zip(depth, exp_mask):
        loss += one_scale(d, mask)
    return loss
def add_storage_config(gsConfig):
    """Populate gsConfig["storage"] with per-group, per-location storage names.

    For every storage group under the Azure cluster config and every
    location with a resolvable region string, records the storage name
    (group name + location) under gsConfig["storage"][group][location].
    """
    locations = get_locations()
    group_names = utils.tolist(config["azure_cluster"]["storages"])
    gsConfig["storage"] = {}
    for group in group_names:
        group_config = config["azure_cluster"][group]
        for location in locations:
            # Skip locations that do not map to a known region.
            if get_region_string(location) is None:
                continue
            per_group = gsConfig["storage"].setdefault(group, {})
            per_group[location] = group_config["name"] + location
def set_angular(self, icut=0, ncuts=1, iaddbins=None, power_weight=1, modes=None):
    """Compute angular pair counts between the two random catalogues.

    Loads one random catalogue per mode in self.modes, optionally restricts
    each to an additional bin, projects positions onto the unit sphere,
    optionally raises weights to a per-mode power, then runs PyReal2PCF on
    a slice of the first catalogue against the full second catalogue and
    stores the outcome in self.result.

    # Arguments
        icut: index of the slice taken from the first catalogue.
        ncuts: total number of slices.
        iaddbins: optional per-mode additional bin selection (scalar or pair).
        power_weight: optional per-mode exponent applied to 'Weight'.
        modes: unused here; kept for interface compatibility.
    """
    cut_bins = utils.tolist(iaddbins, n=2, value=None)
    exponents = utils.tolist(power_weight, n=2, value=1)
    loaded = []
    for index, mode in enumerate(self.modes):
        cat = Catalogue.load(self.params['path_randoms'][mode])
        selected_bin = cut_bins[index]
        if selected_bin is not None:
            cat = cat[cat['iaddbin'] == selected_bin]
            self.logger.info(
                'Additional cut {:d} for {} randoms ({:d} objects).'.
                format(selected_bin, mode, cat.size))
        # Normalize positions to unit vectors (angular correlation only).
        cat['Position'] /= cat.distance()[:, None]
        exponent = exponents[index]
        if exponent != 1:
            self.logger.info('Raising weights {} to power {:.4f}.'.format(
                mode, exponent))
            cat['Weight'] **= exponent
        loaded.append(cat)
    estimator = PyReal2PCF(ells=[0], los='endpoint', losn=0, **self.params)
    estimator.set_grid()
    self.result = estimator.run(loaded[0].slice(icut, ncuts), loaded[1])
def config_app_with_google(configApp, provider):
    """Register a Google Cloud Storage CDN endpoint per configured location.

    For every location with a resolvable region string, builds the storage
    name from the "cdn" group config and appends the corresponding
    storage.googleapis.com endpoint under
    configApp["Services"][location]["cdns"][provider].

    # Arguments
        configApp: mutable application config dict, updated in place.
        provider: key under which endpoints are grouped (e.g. a CDN provider).
    """
    locations = get_locations()
    # Fix: dropped the unused `storages = utils.tolist(...)` computation —
    # only the hard-coded "cdn" group was ever read below.
    configApp.setdefault("Services", {})
    for location in locations:
        # Skip locations that do not map to a known region.
        if get_region_string(location) is None:
            continue
        for grp in ["cdn"]:
            configGrp = config["azure_cluster"][grp]
            storagename = configGrp["name"] + location
            configAppGrp = configApp["Services"].setdefault(location, {})
            endpoints = configAppGrp.setdefault("cdns", {}).setdefault(
                provider, [])
            endpoint = "https://storage.googleapis.com/%s/" % storagename
            endpoints.append(endpoint)
def smooth_loss(disp):
    """Second-order gradient smoothness penalty over disparity map(s).

    Sums the mean absolute second derivatives (dxx, dxy, dyx, dyy) of each
    disparity map, down-weighting each successive pyramid level by
    2*sqrt(2) so coarser scales contribute less.

    # Arguments
        disp: a disparity tensor (B, C, H, W) or a list of them, one per scale.

    # Returns
        Scalar tensor: the weighted smoothness loss.
    """
    def _grads(t):
        # Finite differences along width (dx) and height (dy).
        d_dx = t[:, :, :, 1:] - t[:, :, :, :-1]
        d_dy = t[:, :, 1:] - t[:, :, :-1]
        return d_dx, d_dy

    total = 0
    scale_weight = 1.
    for level in tolist(disp):
        gx, gy = _grads(level)
        gxx, gxy = _grads(gx)
        gyx, gyy = _grads(gy)
        total += scale_weight * (gxx.abs().mean() + gxy.abs().mean()
                                 + gyx.abs().mean() + gyy.abs().mean())
        scale_weight /= 2.83  # 2 * 2^0.5
    return total
def set_angular(self, icut=0, icut1=0, ncuts=1, iaddbins=None, modes=None):
    """Accumulate angular triple counts (3PCF) over the radial sub-bins of
    one cut.

    Loads one random catalogue per mode in self.modes, optionally restricts
    each to an additional bin, projects positions onto the unit sphere,
    then for every 'ibin' value listed in the cut's 'icutibins' runs
    PyReal3PCF on (slice of catalogue 1, matching subsets of catalogues 2
    and 3) and sums the results into self.result.

    # Arguments
        icut: index into the 'icutibins' attribute of catalogues 2 and 3.
        icut1: index of the slice taken from the first catalogue.
        ncuts: total number of slices for the first catalogue.
        iaddbins: optional per-mode additional bin selection (up to 3 modes).
        modes: unused here; presumably kept for interface compatibility
            with sibling methods — confirm.
    """
    iaddbins = utils.tolist(iaddbins, n=3, value=None)
    catalogues = []
    for imode, mode in enumerate(self.modes):
        path = self.params['path_randoms'][mode]
        catalogue = Catalogue.load(path)
        if iaddbins[imode] is not None:
            # Restrict to the requested additional bin for this mode.
            catalogue = catalogue[catalogue['iaddbin'] == iaddbins[imode]]
            self.logger.info(
                'Additional cut {:d} for {} randoms ({:d} objects).'.
                format(iaddbins[imode], mode, catalogue.size))
        # Normalize positions to unit vectors (angular statistics only).
        catalogue['Position'] /= catalogue.distance()[:, None]
        catalogues.append(catalogue)
    # The second and third catalogues must agree on the sub-bin layout.
    icutibins = catalogues[1].attrs['icutibins'][icut]
    assert (icutibins == catalogues[2].attrs['icutibins'][icut]).all()
    icutnbins = len(icutibins)
    catalogue1 = catalogues[0].slice(icut1, ncuts)
    pyreal3pcf = PyReal3PCF(ells=[0], los='endpoint', losn=0, **self.params)
    pyreal3pcf.set_grid()
    # Sum the per-sub-bin results; a fresh copy of the estimator is used
    # per run so internal state does not leak across sub-bins.
    self.result = 0
    for ibin, bin in enumerate(icutibins):
        self.logger.info('Correlating slice {:d} ({:d}/{:d}).'.format(
            bin, ibin + 1, icutnbins))
        catalogue2 = catalogues[1][catalogues[1]['ibin'] == bin]
        catalogue3 = catalogues[2][catalogues[2]['ibin'] == bin]
        # Skip empty sub-bins — nothing to correlate.
        if not catalogue2 or not catalogue3:
            continue
        self.result += pyreal3pcf.copy().run(catalogue1, catalogue2,
                                             catalogue3)
def get_radial_density(self, catalogue, iaddbins=None, rwidth=2., redges=None,
                       normalize=True, density=True, power_weight=1):
    """Compute radial (comoving-distance) density profiles of a catalogue.

    Bins object weights in comoving distance, either on user-provided
    edges (`redges`) or on uniform bins of width `rwidth` spanning the
    catalogue's distance range, and returns one profile per entry of
    `iaddbins`.

    # Arguments
        catalogue: input catalogue; must provide distance(), 'Weight',
            'iaddbin' and trues() — project Catalogue API.
        iaddbins: optional additional-bin selection(s); each non-None entry
            restricts the catalogue to objects with that 'iaddbin' value.
        rwidth: target radial bin width, used only when redges is None.
        redges: explicit radial bin edges; must encompass the full
            distance range, otherwise ValueError is raised.
        normalize: per-profile flag(s): divide each profile by its sum.
        density: per-profile flag(s): divide counts by the shell volumes
            (via radial_volume).
        power_weight: per-profile exponent(s) applied to the weights.

    # Returns
        radial: bin centers (midpoints of the radial edges).
        densities: list of binned (weighted) profiles, one per iaddbin.
        weights: list of the (possibly exponentiated) weight arrays used.

    # Raises
        ValueError: when redges does not cover the catalogue's full
            distance range.
    """
    # Broadcast scalar options to one entry per requested profile.
    # NOTE(review): `fill=-1` here vs `value=1` on the power_weight call —
    # verify utils.tolist accepts both keywords; looks inconsistent.
    iaddbins = utils.tolist(iaddbins)
    normalize = utils.tolist(normalize, n=len(iaddbins), fill=-1)
    density = utils.tolist(density, n=len(iaddbins), fill=-1)
    power_weight = utils.tolist(power_weight, n=len(iaddbins), value=1)
    distance = catalogue.distance()
    dmin, dmax = distance.min(), distance.max()
    self.logger.info('Comoving distances: {:.1f} - {:.1f}.'.format(
        dmin, dmax))
    if redges is not None:
        # User-supplied edges: derive the (mean) width and validate range.
        radialedges = scipy.array(redges)
        rwidth = scipy.mean(scipy.diff(radialedges))
        rmin, rmax = radialedges.min(), radialedges.max()
        if (rmin > dmin) or (rmax < dmax):
            raise ValueError(
                'Provided radial-edges ({:.1f} - {:.1f}) do not encompass the full survey ({:.1f} - {:.1f}).'
                .format(rmin, rmax, dmin, dmax))
        self.logger.info(
            'Provided radial-edges of width: {:.1f} and range: {:.1f} - {:.1f}.'
            .format(rwidth, rmin, rmax))
        nbins = len(radialedges) - 1
    else:
        # Uniform bins covering [dmin, dmax]; the 1e-9 pad keeps the
        # maximum distance inside the last bin.
        self.logger.info('Provided radial-width: {:.1f}.'.format(rwidth))
        nbins = scipy.rint((dmax - dmin) / rwidth).astype(int)
        radialedges = scipy.linspace(dmin, dmax + 1e-9, nbins + 1)
    self.logger.info(
        'There are {:d} radial-bins with an average of {:.1f} objects.'.
        format(nbins, len(catalogue) * 1. / nbins))

    def radial_density(distance, weight, normalize=True, density=True):
        # Sum weights per radial bin; optionally convert to a volume
        # density and/or normalize to unit sum.
        toret = stats.binned_statistic(distance,
                                       values=weight,
                                       statistic='sum',
                                       bins=radialedges)[0]
        if density:
            toret /= radial_volume(radialedges)
        if normalize:
            toret /= toret.sum()
        return toret

    # Bin centers.
    radial = (radialedges[:-1] + radialedges[1:]) / 2.
    densities, weights = [], []
    for iaddbin_, normalize_, density_, power_ in zip(
            iaddbins, normalize, density, power_weight):
        if iaddbin_ is not None:
            mask = catalogue['iaddbin'] == iaddbin_
            self.logger.info('Additional cut {:d} ({:d} objects).'.format(
                iaddbin_, mask.sum()))
        else:
            mask = catalogue.trues()
        weight = catalogue['Weight'][mask]
        if power_ != 1:
            self.logger.info(
                'Raising weights to power {:.4f}.'.format(power_))
            weight **= power_
        densities.append(
            radial_density(distance[mask],
                           weight,
                           normalize=normalize_,
                           density=density_))
        weights.append(weight)
    # Drop the catalogue reference from params — presumably to avoid
    # serializing it later; confirm against callers.
    self.params['catalogue'] = None
    return radial, densities, weights