def climacell(
    city: str, api_key: Optional[str] = None, first_attempt: bool = True
) -> None:
    logging.info(f"Sending request for '{city}' to ClimaCell API.")
    if api_key is None:
        try:
            api_key = keys.climacell_api_key
        except Exception as err:
            logging.error("Unable to get API key for ClimaCell.")
            logging.error(err)
            return
    coords = get_coordinates(city)
    try:
        forecast = cc.get_climacell_data(
            lat=coords.lat, long=coords.long, api_key=api_key
        )
        fp = save_data("climacell", city, forecast)
        logging.info(f"Saved results to '{fp.as_posix()}'")
    except HTTPError as http_err:
        logging.error(f"ClimaCell API request error ({http_err.response.status_code}).")
        logging.error(http_err.response.json()["message"])
        if first_attempt:
            logging.info("Retrying request to ClimaCell.")
            climacell(city=city, api_key=api_key, first_attempt=False)
    except Exception as err:
        logging.error(err)

def accuweather(
    city: str, api_key: Optional[str] = None, first_attempt: bool = True
) -> None:
    logging.info(f"Sending request for '{city}' to AccuWeather API.")
    if api_key is None:
        try:
            api_key = keys.accuweather_api_key
        except Exception as err:
            logging.error("Unable to get API key for AccuWeather.")
            logging.error(err)
            return
    coords = get_coordinates(city)
    try:
        forecast = accu.get_accuweather_forecast(
            lat=coords.lat, long=coords.long, api_key=api_key
        )
        fp = save_data(source="accuweather", city=city, data=forecast)
        logging.info(f"Saved results to '{fp.as_posix()}'")
    except HTTPError as http_err:
        logging.error(
            f"AccuWeather API request error ({http_err.response.status_code})."
        )
        logging.error(http_err.response.json()["Message"])
        if first_attempt and http_err.response.status_code != 503:
            logging.info("Retrying request to AccuWeather.")
            accuweather(city=city, api_key=api_key, first_attempt=False)
    except Exception as err:
        logging.error(err)

def open_weather_map(
    city: str, api_key: Optional[str] = None, first_attempt: bool = True
) -> None:
    logging.info(f"Sending request for '{city}' to OpenWeatherMap API.")
    if api_key is None:
        try:
            api_key = keys.openweathermap_api_key
        except Exception as err:
            logging.error("Unable to get API key for OpenWeatherMap.")
            logging.error(err)
            return
    coords = get_coordinates(city)
    try:
        forecast = owm.get_openweathermap_data(
            lat=coords.lat, long=coords.long, api_key=api_key
        )
        fp = save_data("open-weather-map", city, forecast)
        logging.info(f"Saved results to '{fp.as_posix()}'")
    except HTTPError as http_err:
        logging.error(
            f"OpenWeatherMap API request error ({http_err.response.status_code})."
        )
        logging.error(http_err.response.json()["detail"])
        if first_attempt:
            logging.info("Retrying request to OpenWeatherMap.")
            open_weather_map(city=city, api_key=api_key, first_attempt=False)
    except Exception as err:
        logging.error(err)

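# Hypothetical usage sketch (not from the original sources): the three provider
# functions above share the same signature, so a caller can fan a single city
# out to every provider. Assumes this module's shared names (logging, Optional,
# requests' HTTPError, the `keys`/`cc`/`accu`/`owm` helpers, get_coordinates,
# save_data) are already in scope; they are not shown in the snippets.
def fetch_all_forecasts(city: str) -> None:
    for fetch in (climacell, accuweather, open_weather_map):
        fetch(city)  # each function logs and saves its own results


# fetch_all_forecasts("Chicago")
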
def get_weather():
    # NOTE: the Dark Sky API called below was shut down in 2023 after Apple's
    # acquisition, so this request will no longer succeed.
    lat, lng = coordinates.get_coordinates()
    api_key = "e0a60102f41a97b05ea6fe58b5b5fbd0"  # hardcoded key; better kept in config/env
    host = "https://api.darksky.net/forecast/"
    url = host + api_key + "/" + str(lat) + "," + str(lng)
    response = requests.get(url)
    data = response.json()  # parse once instead of re-parsing per field

    utc_time = data["currently"]["time"]
    current_time = datetime.datetime.fromtimestamp(utc_time)
    print("Current Time: ", current_time)

    weather_summary = data["currently"]["summary"]
    print("Current Weather: ", weather_summary)

    current_temp = data["currently"]["temperature"]
    print("Current Temperature: ", current_temp)

    future_hourly = data["hourly"]["data"]
    print("Hourly future updates")
    for i in future_hourly:
        time = datetime.datetime.fromtimestamp(i["time"])
        summary = i["summary"]
        print("{0} : {1}".format(str(time), summary))

def providers_map():
    if 'session_id' not in session:
        abort(400)
    center, provider_array = get_coordinates(
        plan_id=request.args.get("plan_id"),
        zipcode=session['zipcode'],
        state=session['state'])
    response = dict(center=center, provider_array=provider_array)
    return render_template('providers_map.html', response=response)

def national_weather_service(city: str, n_attempt: int = 1) -> None:
    logging.info(f"Sending request for '{city}' to NWS API.")
    coords = get_coordinates(city)
    try:
        forecast = nws.get_nws_forecast(lat=coords.lat, long=coords.long)
        fp = save_data(source="national-weather-service", city=city, data=forecast)
        logging.info(f"Saved results to '{fp.as_posix()}'")
    except HTTPError as http_err:
        logging.error(f"NWS API request error ({http_err.response.status_code}).")
        logging.error(http_err)
        if n_attempt <= 5 and http_err.response.status_code != 404:
            logging.info("Trying NWS API again.")
            national_weather_service(city=city, n_attempt=n_attempt + 1)
    except Exception as err:
        logging.error(err)

def find_nearest(home_location):
    home_coordinates = get_coordinates(home_location)
    distance_array = []  # was used without initialization in the original; create it locally
    for center in full_centers_json:
        distance = distanceLatLong.distance_calc(
            home_coordinates, center['physical_address'][0]['Coordinates'])
        entry = [
            distance,
            center['alternate_name'],
            center['physical_address'][0]['address_1'],
            center['phones'][0]['number'],
        ]
        if len(center['regular_schedule']) > 0:
            entry.append(center['regular_schedule'])
        distance_array.append(entry)
    distance_array.sort()  # sorts by the first element, i.e. distance
    print(print_center(distance_array.pop(0)))

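# A minimal sketch of what `distanceLatLong.distance_calc` might look like,
# assuming it computes great-circle (haversine) distance between two
# (lat, long) pairs; the real helper is not shown in the snippet above.
import math


def distance_calc(a, b):
    """Haversine distance in kilometers between points a and b, each (lat, long)."""
    lat1, lon1, lat2, lon2 = map(math.radians, (a[0], a[1], b[0], b[1]))
    h = (math.sin((lat2 - lat1) / 2) ** 2
         + math.cos(lat1) * math.cos(lat2) * math.sin((lon2 - lon1) / 2) ** 2)
    return 2 * 6371.0 * math.asin(math.sqrt(h))  # 6371 km: mean Earth radius
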
def train(opt: Options):
    real_label = 1
    fake_label = 0

    netG = Generator(opt)
    netD = Discriminator(opt)
    print(netG)
    print(netD)
    netG.apply(weights_init_g)
    netD.apply(weights_init_d)
    # summary(netD, (opt.c_dim, opt.x_dim, opt.y_dim))

    dataloader = load_data(opt.data_root, opt.x_dim, opt.y_dim,
                           opt.batch_size, opt.workers)
    x, y, r = get_coordinates(x_dim=opt.x_dim, y_dim=opt.y_dim,
                              scale=opt.scale, batch_size=opt.batch_size)

    optimizerD = optim.Adam(netD.parameters(), lr=opt.lr, betas=(opt.beta1, opt.beta2))
    optimizerG = optim.Adam(netG.parameters(), lr=opt.lr, betas=(opt.beta1, opt.beta2))
    criterion = nn.BCELoss()
    # criterion = nn.L1Loss()

    noise = torch.FloatTensor(opt.batch_size, opt.z_dim)
    ones = torch.ones(opt.batch_size, opt.x_dim * opt.y_dim, 1)
    input_ = torch.FloatTensor(opt.batch_size, opt.c_dim, opt.x_dim, opt.y_dim)
    label = torch.FloatTensor(opt.batch_size, 1)
    input_ = Variable(input_)
    label = Variable(label)
    noise = Variable(noise)

    if opt.use_cuda:
        netG = netG.cuda()
        netD = netD.cuda()
        x = x.cuda()
        y = y.cuda()
        r = r.cuda()
        ones = ones.cuda()
        criterion = criterion.cuda()
        input_ = input_.cuda()
        label = label.cuda()
        noise = noise.cuda()

    noise.data.normal_()
    fixed_seed = torch.bmm(ones, noise.unsqueeze(1))

    def _update_discriminator(data):
        # for p in netD.parameters():
        #     p.requires_grad = True  # to avoid computation
        netD.zero_grad()

        # train with real
        real_cpu, _ = data
        input_.data.copy_(real_cpu)
        label.data.fill_(real_label - 0.1)  # use smooth label for discriminator
        output = netD(input_)
        errD_real = criterion(output, label)
        errD_real.backward()
        D_x = output.data.mean()

        # train with fake
        noise.data.normal_()
        seed = torch.bmm(ones, noise.unsqueeze(1))
        fake = netG(x, y, r, seed)
        label.data.fill_(fake_label)
        output = netD(fake.detach())  # ".detach()" avoids backprop through G
        errD_fake = criterion(output, label)
        errD_fake.backward()  # gradients for fake/real are accumulated
        D_G_z1 = output.data.mean()
        errD = errD_real + errD_fake
        optimizerD.step()  # .step() can be called once the gradients are computed
        return fake, D_G_z1, errD, D_x

    def _update_generator(fake):
        # for p in netD.parameters():
        #     p.requires_grad = False  # to avoid computation
        netG.zero_grad()
        label.data.fill_(real_label)  # fake labels are real for generator cost
        output = netD(fake)
        errG = criterion(output, label)
        errG.backward()  # first backward through G's graph (the D pass used fake.detach())
        D_G_z2 = output.data.mean()
        optimizerG.step()
        return D_G_z2, errG

    def _save_model(epoch):
        os.makedirs(opt.models_root, exist_ok=True)
        if epoch % 1 == 0:
            torch.save(netG.state_dict(),
                       os.path.join(opt.models_root, "G-cppn-wgan-anime_{}.pth".format(epoch)))
            torch.save(netD.state_dict(),
                       os.path.join(opt.models_root, "D-cppn-wgan-anime_{}.pth".format(epoch)))

    def _log(i, epoch, errD, errG, D_x, D_G_z1, D_G_z2, delta_time):
        print('[%d/%d][%d/%d] Loss_D: %.4f Loss_G: %.4f D(x): %.4f D(G(z)): %.4f / %.4f Elapsed %.2f s'
              % (epoch, opt.iterations, i, len(dataloader),
                 errD.data.item(), errG.data.item(), D_x, D_G_z1, D_G_z2, delta_time))

    def _save_images(i, epoch):
        os.makedirs(opt.images_root, exist_ok=True)
        if i % 100 == 0:
            fake = netG(x, y, r, fixed_seed)
            fname = os.path.join(opt.images_root,
                                 "fake_samples_{:02}-{:04}.png".format(epoch, i))
            vutils.save_image(fake.data[0:64, :, :, :], fname, nrow=8)

    def _start():
        print("Start training")
        for epoch in range(opt.iterations):
            for i, data in enumerate(dataloader, 0):
                start_iter = time.time()
                fake, D_G_z1, errD, D_x = _update_discriminator(data)
                D_G_z2, errG = _update_generator(fake)
                end_iter = time.time()

                _log(i, epoch, errD, errG, D_x, D_G_z1, D_G_z2, end_iter - start_iter)
                _save_images(i, epoch)
            _save_model(epoch)

    _start()

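# Hypothetical wiring (not in the original sources): the snippet never shows
# how `Options` is built, so this sketch assumes it is a plain dataclass with
# exactly the fields `train` reads. Field names match the attribute accesses
# above; every default value here is an illustrative guess.
from dataclasses import dataclass


@dataclass
class Options:
    data_root: str = "./data"
    models_root: str = "./models"
    images_root: str = "./images"
    x_dim: int = 64          # output image width
    y_dim: int = 64          # output image height
    c_dim: int = 3           # image channels
    z_dim: int = 32          # latent vector size
    scale: float = 8.0       # CPPN coordinate scale
    batch_size: int = 64
    workers: int = 2
    lr: float = 1e-4
    beta1: float = 0.5
    beta2: float = 0.999
    iterations: int = 50     # epochs
    use_cuda: bool = False


# train(Options())
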
#!/usr/bin/env python
from __future__ import print_function

from coordinates import get_coordinates
from coordinates import calculate_distances

import numpy as np

np.set_printoptions(linewidth=100)

if __name__ == '__main__':
    with open('data.txt', 'r') as f:
        coordinates = get_coordinates(f)
    part_2_max = 10000

    debug = False
    if debug:
        from coordinates import test_data
        coordinates = get_coordinates(test_data)
        part_2_max = 32

    # coordinates.shape == (#coords, 2)
    distance = calculate_distances(coordinates)
    # distance.shape == (#coords, x_max, y_max)
    if debug:
        # This should yield the answer from the example on AoC.

def __init__(self, opt: Options) -> None:
    self.opt = opt
    self.x, self.y, self.r = get_coordinates(opt.x_dim, opt.y_dim,
                                             scale=opt.scale,
                                             batch_size=opt.batch_size)

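# A minimal sketch of the CPPN `get_coordinates` helper used above, assuming
# the common formulation: a scaled (x, y) grid plus the radius r = sqrt(x^2 + y^2),
# each tiled to shape (batch_size, x_dim * y_dim, 1). The real helper may differ.
import numpy as np
import torch


def get_coordinates(x_dim, y_dim, scale=1.0, batch_size=1):
    # coordinates in [-scale, scale] along each axis
    xs = scale * (np.arange(x_dim) - (x_dim - 1) / 2.0) / ((x_dim - 1) / 2.0)
    ys = scale * (np.arange(y_dim) - (y_dim - 1) / 2.0) / ((y_dim - 1) / 2.0)
    x_mat, y_mat = np.meshgrid(xs, ys, indexing="ij")   # each (x_dim, y_dim)
    r_mat = np.sqrt(x_mat ** 2 + y_mat ** 2)            # radial distance from center

    def _to_batch(m):
        flat = m.reshape(1, -1, 1)                      # (1, x_dim*y_dim, 1)
        return torch.from_numpy(np.tile(flat, (batch_size, 1, 1))).float()

    return _to_batch(x_mat), _to_batch(y_mat), _to_batch(r_mat)
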
import cv2
import numpy as np

from coordinates import get_coordinates
from connections import get_connections
from estimators import estimate
from renderers import draw
# NOTE: get_default_configuration is used below but its import is not shown in
# the original snippet; it is assumed to come from the project's config module.

if __name__ == '__main__':
    heatmaps_path = './resources/heatmaps.npy'
    paf_path = './resources/pafs.npy'
    example_img_path = 'resources/ski.jpg'
    output_img_path = 'output.jpg'

    example_image = cv2.imread(example_img_path)
    heatmaps = np.load(heatmaps_path)
    paf = np.load(paf_path)

    cfg = get_default_configuration()

    # pose-estimation pipeline: keypoint peaks -> limb connections -> skeletons -> overlay
    coordinates = get_coordinates(cfg, heatmaps)
    connections = get_connections(cfg, coordinates, paf)
    skeletons = estimate(cfg, connections)
    output = draw(cfg, example_image, coordinates, skeletons)

    cv2.imwrite(output_img_path, output)
    print(f"Output image: {output_img_path}")

import time

import cv2
import numpy as np
import pyautogui
from PIL import Image

from coordinates import get_coordinates
# NOTE: `run` (the segmentation inference below) is used without an import in
# the original snippet; it is assumed to come from the project's model module.

if __name__ == "__main__":
    target_test_coordinates = (1500, 1500)

    # segment the scanned object, downscaling to at most 1024x1024 first
    scanned_image = Image.open("ObjectToBeSegemented/object6.jpg")
    if scanned_image.size[0] > 1024 or scanned_image.size[1] > 1024:
        scanned_image.thumbnail((1024, 1024))
    size = scanned_image.size
    res = run(np.array(scanned_image), size)
    mask = res.convert("L")
    empty = Image.new("RGBA", size, 0)
    scanned_image = Image.composite(scanned_image, empty, mask)
    scanned_image.show()
    print("scanning ends here")

    time.sleep(4)

    # map the target point from the reference photo into the live screenshot
    img1 = cv2.imread("Screenshots/photo.jpg")
    img2 = pyautogui.screenshot()
    img2 = cv2.cvtColor(np.array(img2), cv2.COLOR_RGB2BGR)
    x, y = get_coordinates(img1, img2, target_test_coordinates)
    print(x, y)
    print("mapping complete")

    # mark the mapped point on the screenshot and the original target on the photo
    cv2.rectangle(img2, (x, y), (x + 5, y + 5), (0, 0, 255), 25)
    cv2.rectangle(
        img1,
        (target_test_coordinates[0], target_test_coordinates[1]),
        (target_test_coordinates[0] + 5, target_test_coordinates[1] + 5),
        (0, 0, 255), 25)