def bayesian_adjust_file(prior_filename, data_file, output_base, res, lower_left_coord, upper_right_coord):
    """Adjust the file given in data_file using a Bayesian approach.

    Treats prior_filename as an image representing the prior and computes a
    new modern coordinate pair as the MAP of the resulting posterior, all
    using grid approximation over the bounding box spanned by the two corner
    coordinates (each a (lon, lat) pair).

    Writes <output_base>.kml and <output_base>.csv under PTOL_HOME/Data.
    """
    lon_lim = lower_left_coord[0], upper_right_coord[0]
    # Latitude limits run top-to-bottom to match image row order.
    lat_lim = upper_right_coord[1], lower_left_coord[1]
    prior = ImagePrior(prior_filename, lat_lim, lon_lim, res)
    places = pd.read_csv(data_file, encoding='utf-8')
    # Preserve the incoming modern coordinates under 'original_*' names.
    places.rename(columns={
        'modern_lat': 'original_lat',
        'modern_lon': 'original_lon'}, inplace=True)
    # .copy() yields independent frames; the deprecated .is_copy = False
    # hack and the .ix indexer were removed from modern pandas.
    known = places[places.disposition == 'known'].copy()
    known.loc[:, 'modern_lat'] = known.loc[:, 'original_lat']
    known.loc[:, 'modern_lon'] = known.loc[:, 'original_lon']
    unknown = places[places.disposition != 'known'].copy()
    # bayesian_adjust returns the MAP coordinates per row; merge them back
    # onto the unknown places by index.
    adjusted = unknown.apply(prior.bayesian_adjust, axis=1)
    unknown = unknown.merge(adjusted, left_index=True, right_index=True)
    kml_filename = os.path.join(PTOL_HOME, 'Data', output_base + '.kml')
    csv_filename = os.path.join(PTOL_HOME, 'Data', output_base + '.csv')
    common.write_kml_file(kml_filename, None, known, unknown)
    common.write_csv_file(csv_filename, known, unknown)
def main(filename, model, places):
    """Fit model on the known places, predict the unknown ones, and write
    KML/CSV/PDF/PNG outputs.

    filename is the target KML path; the CSV, PDF, and PNG siblings share
    its base name. The map title is the base file name with underscores
    replaced by spaces.
    """
    known, unknown = common.split_places(places)
    knownx = known.loc[:, XCOLS]
    knowny = known.loc[:, YCOLS]
    model.fit(knownx, knowny)
    unknownx = unknown.loc[:, XCOLS]
    unknown.loc[:, YCOLS] = model.predict(unknownx)
    # Strip the extension once with splitext instead of repeating the
    # magic filename[0:-4] slice, which silently assumed a 3-char suffix.
    base, _ = os.path.splitext(filename)
    title = ' '.join(os.path.basename(base).split('_'))
    common.write_kml_file(filename, None, known, unknown)
    common.write_csv_file(base + '.csv', known, unknown)
    common.write_map_file(base + '.pdf', known, unknown, 30, 24, 300, 'ptol_name', title)
    common.write_map_file(base + '.png', known, unknown, 30, 24, 300, 'ptol_name', title)
def bayesian_adjust_file(prior_filename, data_file, output_base, res):
    """Adjust the file given in data_file using a Bayesian approach.

    Treats prior_filename as an image representing the prior and computes a
    new modern coordinate pair as the MAP of the resulting posterior, all
    using grid approximation over the fixed (35,5)-(65,95) lat/lon bounds.

    Writes <output_base>.kml and <output_base>.csv under PTOL_HOME/Data.
    """
    prior = ImagePrior(prior_filename, (35, 5), (65, 95), res)
    # Input file is Windows-1252 encoded, not UTF-8.
    places = pd.read_csv(data_file, encoding='cp1252')
    # Preserve the incoming modern coordinates under 'original_*' names.
    places.rename(columns={
        'modern_lat': 'original_lat',
        'modern_lon': 'original_lon'}, inplace=True)
    # .copy() yields independent frames; the deprecated .is_copy = False
    # hack and the .ix indexer were removed from modern pandas.
    known = places[places.disposition == 'known'].copy()
    known.loc[:, 'modern_lat'] = known.loc[:, 'original_lat']
    known.loc[:, 'modern_lon'] = known.loc[:, 'original_lon']
    # NOTE: this variant selects only rows explicitly marked 'unknown',
    # unlike the sibling that takes everything != 'known'.
    unknown = places[places.disposition == 'unknown'].copy()
    adjusted = unknown.apply(prior.bayesian_adjust, axis=1)
    unknown = unknown.merge(adjusted, left_index=True, right_index=True)
    kml_filename = os.path.join(PTOL_HOME, 'Data', output_base + '.kml')
    csv_filename = os.path.join(PTOL_HOME, 'Data', output_base + '.csv')
    common.write_kml_file(kml_filename, None, known, unknown)
    common.write_csv_file(csv_filename, known, unknown)
import common
import extra
import constant

# Fetch the raw daily price series from the configured URL.
price_data = common.get_data(constant.URL)['series'][0]['data']

# Reformat the date field of every [date, price] row in place.
for row in price_data:
    row[0] = common.format_date(row[0])

# Persist the daily prices, then the month-normalized series.
common.write_csv_file(constant.DAILY_PRICE_FILE_PATH, constant.HEADING, price_data)
common.write_csv_file(constant.MONTHLY_PRICE_FILE_PATH, constant.HEADING,
                      extra.normalize_to_monthly(price_data))