def save_total_points(target_folder, infrastructure_graph, demand_point_table, **keywords):
    """Save point-level outputs under target_folder: a full properties
    table (CSV + shapefile) and a one-example-per-technology report."""
    # ls holds one variable dict per node; g holds the global variables
    ls = [node_d for node_id, node_d in infrastructure_graph.cycle_nodes()]
    g = keywords
    properties_folder = make_folder(join(target_folder, 'properties'))
    reports_folder = make_folder(join(target_folder, 'reports'))
    # Preserve columns and column order from demand_point_table
    keys = BASE_KEYS + [
        x for x in demand_point_table.columns
        if x not in BASE_KEYS + FULL_KEYS
    ] + FULL_KEYS
    # Include miscellaneous variables
    miscellaneous_keys = _get_miscellaneous_keys(ls, g, keys)
    # Save properties/points.csv
    t = get_table_from_variables(ls, g, keys=keys + miscellaneous_keys)
    t_path = join(properties_folder, 'points.csv')
    t.to_csv(t_path)
    # Save properties/points.shp.zip
    save_shapefile(join(properties_folder, 'points.shp.zip'), t)
    # Save reports/example-by-technology.csv
    # (first row of each proposed technology, transposed for readability)
    table = t.reset_index().groupby(
        'proposed_technology').first().reset_index()
    table.columns = [format_column_name(x) for x in table.columns]
    table_path = join(reports_folder, 'example-by-technology.csv')
    table.transpose().to_csv(table_path, header=False)
def save_total_points(
        target_folder, infrastructure_graph, demand_point_table, **keywords):
    """Write points.csv, points.shp.zip and the example-by-technology
    report for every node in infrastructure_graph."""
    # One local-variable dict per node; keywords act as global variables
    ls = [node_d for node_id, node_d in infrastructure_graph.cycle_nodes()]
    g = keywords
    properties_folder = make_folder(join(target_folder, 'properties'))
    reports_folder = make_folder(join(target_folder, 'reports'))
    # Preserve columns and column order from demand_point_table
    keys = BASE_KEYS + [
        x for x in demand_point_table.columns
        if x not in BASE_KEYS + FULL_KEYS
    ] + FULL_KEYS
    # Include miscellaneous variables
    miscellaneous_keys = _get_miscellaneous_keys(ls, g, keys)
    # Save properties/points.csv
    t = get_table_from_variables(ls, g, keys=keys + miscellaneous_keys)
    t_path = join(properties_folder, 'points.csv')
    t.to_csv(t_path)
    # Save properties/points.shp.zip
    save_shapefile(join(properties_folder, 'points.shp.zip'), t)
    # Save reports/example-by-technology.csv: first row per technology,
    # transposed so each example reads as a column
    table = t.reset_index().groupby(
        'proposed_technology').first().reset_index()
    table.columns = [format_column_name(x) for x in table.columns]
    table_path = join(reports_folder, 'example-by-technology.csv')
    table.transpose().to_csv(table_path, header=False)
def run(self, args):
    """Load application settings, optionally wipe the data folder on
    restart, then ensure every configured *.folder directory exists."""
    settings = get_appsettings(args.configuration_path)
    if args.restart and 'data.folder' in settings:
        remove_safely(settings['data.folder'])
    for setting_name, setting_value in settings.items():
        if not setting_name.endswith('.folder'):
            continue
        make_folder(setting_value)
def test_accept_relative_path(self, result_request, tool_definition, data_folder): tool_definition['argument_names'] = ('x_path', ) # Prepare result_folder result = Result(id='xyz') result_folder = result.get_folder(data_folder) bad_folder = make_folder(join(result_folder, 'bad_folder_name')) open(join(bad_folder, 'x.txt'), 'wt') x_folder = make_folder(join(result_folder, 'x')) open(join(x_folder, 'x.txt'), 'wt').write('whee') # Use bad result_id raw_arguments = {'x': 'bad/x/x.txt'} with raises(HTTPBadRequest) as e: result_request.prepare_arguments(tool_definition, raw_arguments) assert e.value.detail['x'] == 'invalid' # Use bad folder_name raw_arguments = {'x': 'xyz/bad/x.txt'} with raises(HTTPBadRequest) as e: result_request.prepare_arguments(tool_definition, raw_arguments) assert e.value.detail['x'] == 'invalid' # Use bad path raw_arguments = {'x': 'xyz/x/bad'} with raises(HTTPBadRequest) as e: result_request.prepare_arguments(tool_definition, raw_arguments) assert e.value.detail['x'] == 'invalid' # Use bad path raw_arguments = {'x': 'xyz/x/../bad/run.py'} with raises(HTTPBadRequest) as e: result_request.prepare_arguments(tool_definition, raw_arguments) assert e.value.detail['x'] == 'invalid' # Use good path raw_arguments = {'x': 'xyz/x/x.txt'} result = result_request.prepare_arguments(tool_definition, raw_arguments) assert open(result.arguments['x_path']).read() == 'whee'
def test_accept_relative_path( self, result_request, tool_definition, data_folder): tool_definition['argument_names'] = ('x_path',) # Prepare result_folder result = Result(id='xyz') result_folder = result.get_folder(data_folder) bad_folder = make_folder(join(result_folder, 'bad_folder_name')) open(join(bad_folder, 'x.txt'), 'wt') x_folder = make_folder(join(result_folder, 'x')) open(join(x_folder, 'x.txt'), 'wt').write('whee') # Use bad result_id raw_arguments = {'x': 'bad/x/x.txt'} with raises(HTTPBadRequest) as e: result_request.prepare_arguments(tool_definition, raw_arguments) assert e.value.detail['x'] == 'invalid' # Use bad folder_name raw_arguments = {'x': 'xyz/bad/x.txt'} with raises(HTTPBadRequest) as e: result_request.prepare_arguments(tool_definition, raw_arguments) assert e.value.detail['x'] == 'invalid' # Use bad path raw_arguments = {'x': 'xyz/x/bad'} with raises(HTTPBadRequest) as e: result_request.prepare_arguments(tool_definition, raw_arguments) assert e.value.detail['x'] == 'invalid' # Use bad path raw_arguments = {'x': 'xyz/x/../bad/run.py'} with raises(HTTPBadRequest) as e: result_request.prepare_arguments(tool_definition, raw_arguments) assert e.value.detail['x'] == 'invalid' # Use good path raw_arguments = {'x': 'xyz/x/x.txt'} result = result_request.prepare_arguments( tool_definition, raw_arguments) assert open(result.arguments['x_path']).read() == 'whee'
def migrate_arguments(self, result_arguments, source_folder):
    """Move every *_path argument's file into source_folder and return
    the arguments with those paths rewritten to the new locations."""
    make_folder(source_folder)
    migrated_arguments = OrderedDict()
    for key, value in result_arguments.items():
        if key.endswith('_path'):
            value = move_path(join(source_folder, basename(value)), value)
        migrated_arguments[key] = value
    return migrated_arguments
def run(target_folder, source_folder, sample_count):
    """Copy a random sample of sample_count files found anywhere under
    source_folder into target_folder, printing each chosen path."""
    source_paths = [
        join(root_folder, file_name)
        for root_folder, folder_names, file_names in walk(source_folder)
        for file_name in file_names]
    make_folder(target_folder)
    for source_path in sample(source_paths, sample_count):
        print(source_path)
        shutil.copy2(source_path, target_folder)
def sandbox(tmpdir):
    """Build a temporary tree of files, folders and symlinks for tests
    (nesting reconstructed from the creation calls below):

    source_folder_link -> source_folder
    source_folder/
        external_file_link -> external_folder_file
        external_folder_link -> external_folder
        internal_file_link -> internal_file
        internal_file
        internal_folder_link -> internal_folder
        internal_folder/
            internal_folder_file
        empty_folder/
        .hidden_file
        .hidden_folder/
            hidden_folder_file
    external_folder/
        external_folder_file
    """
    o, temporary_folder = Object(), str(tmpdir)
    o.parent_folder = temporary_folder
    o.source_folder = make_folder(join(temporary_folder, 'source_folder'))
    o.internal_file_path = join(o.source_folder, 'internal_file')
    open(o.internal_file_path, 'wt').write('internal_file')
    o.external_folder = make_folder(join(temporary_folder, 'external_folder'))
    o.external_folder_file_path = join(
        o.external_folder, 'external_folder_file')
    open(o.external_folder_file_path, 'wt').write('external_folder_file')
    o.internal_folder = make_folder(join(o.source_folder, 'internal_folder'))
    o.internal_folder_file_path = join(
        o.internal_folder, 'internal_folder_file')
    open(o.internal_folder_file_path, 'wt').write('internal_folder_file')
    # Symlinks: internal targets live inside source_folder, external
    # targets live outside it
    o.internal_file_link_path = join(o.source_folder, 'internal_file_link')
    symlink(o.internal_file_path, o.internal_file_link_path)
    o.external_file_link_path = join(o.source_folder, 'external_file_link')
    symlink(o.external_folder_file_path, o.external_file_link_path)
    o.external_folder_link_path = join(o.source_folder, 'external_folder_link')
    symlink(o.external_folder, o.external_folder_link_path)
    o.internal_folder_link_path = join(o.source_folder, 'internal_folder_link')
    symlink(o.internal_folder, o.internal_folder_link_path)
    o.empty_folder = make_folder(join(o.source_folder, 'empty_folder'))
    o.hidden_file_path = join(o.source_folder, '.hidden_file')
    o.hidden_folder = make_folder(join(o.source_folder, '.hidden_folder'))
    o.hidden_folder_file_path = join(o.hidden_folder, 'hidden_folder_file')
    open(o.hidden_file_path, 'wt').write('hidden_file')
    open(o.hidden_folder_file_path, 'wt').write('hidden_folder_file')
    o.source_folder_link_path = join(temporary_folder, 'source_folder_link')
    symlink(o.source_folder, o.source_folder_link_path)
    return o
def sandbox(tmpdir):
    """Create a temporary folder layout exercising hidden, empty,
    internal and external files plus symlinks to each:

    source_folder_link -> source_folder
    source_folder/
        external_file_link -> external_folder_file
        external_folder_link -> external_folder
        internal_file_link -> internal_file
        internal_file
        internal_folder_link -> internal_folder
        internal_folder/
            internal_folder_file
        empty_folder/
        .hidden_file
        .hidden_folder/
            hidden_folder_file
    external_folder/
        external_folder_file
    """
    o, temporary_folder = Object(), str(tmpdir)
    o.parent_folder = temporary_folder
    o.source_folder = make_folder(join(temporary_folder, 'source_folder'))
    o.internal_file_path = join(o.source_folder, 'internal_file')
    open(o.internal_file_path, 'wt').write('internal_file')
    o.external_folder = make_folder(join(temporary_folder, 'external_folder'))
    o.external_folder_file_path = join(
        o.external_folder, 'external_folder_file')
    open(o.external_folder_file_path, 'wt').write('external_folder_file')
    o.internal_folder = make_folder(join(o.source_folder, 'internal_folder'))
    o.internal_folder_file_path = join(
        o.internal_folder, 'internal_folder_file')
    open(o.internal_folder_file_path, 'wt').write('internal_folder_file')
    # Links pointing inside and outside source_folder
    o.internal_file_link_path = join(o.source_folder, 'internal_file_link')
    symlink(o.internal_file_path, o.internal_file_link_path)
    o.external_file_link_path = join(o.source_folder, 'external_file_link')
    symlink(o.external_folder_file_path, o.external_file_link_path)
    o.external_folder_link_path = join(o.source_folder, 'external_folder_link')
    symlink(o.external_folder, o.external_folder_link_path)
    o.internal_folder_link_path = join(o.source_folder, 'internal_folder_link')
    symlink(o.internal_folder, o.internal_folder_link_path)
    o.empty_folder = make_folder(join(o.source_folder, 'empty_folder'))
    o.hidden_file_path = join(o.source_folder, '.hidden_file')
    o.hidden_folder = make_folder(join(o.source_folder, '.hidden_folder'))
    o.hidden_folder_file_path = join(o.hidden_folder, 'hidden_folder_file')
    open(o.hidden_file_path, 'wt').write('hidden_file')
    open(o.hidden_folder_file_path, 'wt').write('hidden_folder_file')
    o.source_folder_link_path = join(temporary_folder, 'source_folder_link')
    symlink(o.source_folder, o.source_folder_link_path)
    return o
def test_geomap():
    """End-to-end check: geomap writes exactly one CSV into the target
    folder and its header has the expected columns."""
    address = "1724 church avenue brooklyn, ny"
    target_path = 'results'
    make_folder(target_path)
    search_query = 'grocery_or_supermarket'
    geomap(address, search_query, target_path)
    assert len(os.listdir(target_path)) == 1
    columns = ['latitude', 'longitude', 'fillcolor', 'radiusinpixels']
    target_file = os.path.join(target_path, os.listdir(target_path)[0])
    # reader.next() was Python-2-only; the builtin next() works on both
    # Python 2 and 3, and the with-block closes the file handle
    with open(target_file, 'r') as f:
        assert next(csv.reader(f)) == columns
def test_geomap():
    """geomap should produce a single CSV whose header row matches the
    expected column names."""
    address = "1724 church avenue brooklyn, ny"
    target_path = 'results'
    make_folder(target_path)
    search_query = 'grocery_or_supermarket'
    geomap(address, search_query, target_path)
    assert len(os.listdir(target_path)) == 1
    columns = ['latitude', 'longitude', 'fillcolor', 'radiusinpixels']
    target_file = os.path.join(target_path, os.listdir(target_path)[0])
    # Replace the Python-2-only reader.next() with the builtin next()
    # and close the file deterministically
    with open(target_file, 'r') as f:
        assert next(csv.reader(f)) == columns
def sequence_total_grid_mv_line_network(target_folder, infrastructure_graph):
    """Order grid MV connections with the Sequencer and record the
    connection order on each node and edge of the graph.

    Returns {'infrastructure_graph': graph}, or {} when the graph has no
    edges to sequence.
    """
    drafts_folder = make_folder(join(target_folder, 'drafts'))
    graph = infrastructure_graph
    if not graph.edges():
        return {}  # The network is empty and there is nothing to sequence
    node_table = get_table_from_graph(graph, [
        'longitude', 'latitude', 'population', 'peak_demand_in_kw'])
    # The sequencer expects X/Y coordinate column names
    node_table = node_table.rename(columns={'longitude': 'X', 'latitude': 'Y'})
    node_table_path = join(drafts_folder, 'nodes-sequencer.csv')
    node_table.to_csv(node_table_path)
    edge_shapefile_path = join(drafts_folder, 'edges.shp')
    nwp = NetworkPlan.from_files(
        edge_shapefile_path, node_table_path, prioritize='population',
        proj='+proj=longlat +datum=WGS84 +no_defs')
    model = Sequencer(nwp, 'peak.demand.in.kw')
    model.sequence()
    # Copy each node's far-sighted sequence rank back onto the graph
    order_series = model.output_frame['Sequence..Far.sighted.sequence']
    for index, order in order_series.iteritems():
        node_id = model.output_frame['Unnamed..0'][index]
        graph.node[node_id]['grid_mv_network_connection_order'] = order
    # An edge inherits the earlier of its two endpoint orders
    for node1_id, node2_id, edge_d in graph.cycle_edges():
        node1_d = infrastructure_graph.node[node1_id]
        node2_d = infrastructure_graph.node[node2_id]
        edge_d['grid_mv_network_connection_order'] = min(
            node1_d.get('grid_mv_network_connection_order', float('inf')),
            node2_d.get('grid_mv_network_connection_order', float('inf')))
    return {'infrastructure_graph': graph}
def save_total_lines(
        target_folder, infrastructure_graph, grid_mv_line_geotable):
    """Save proposed grid MV lines (CSV + shapefile) and the existing
    grid MV lines (shapefile) under target_folder/properties."""
    rows = []
    for node1_id, node2_id, edge_d in infrastructure_graph.cycle_edges():
        node1_d = infrastructure_graph.node[node1_id]
        node2_d = infrastructure_graph.node[node2_id]
        edge_order = edge_d['grid_mv_network_connection_order']
        line_length = edge_d['grid_mv_line_adjusted_length_in_meters']
        discounted_cost = edge_d['grid_mv_line_discounted_cost']
        node1_d, node2_d = order_nodes(node1_d, node2_d, edge_order)
        # WKT coordinates are (x, y) = (longitude, latitude); the
        # previous code passed (latitude, longitude), flipping every
        # line geometry (the sibling variant of this function uses the
        # correct order)
        wkt = LineString([(
            node1_d['longitude'], node1_d['latitude'],
        ), (
            node2_d['longitude'], node2_d['latitude'],
        )]).wkt
        rows.append([line_length, discounted_cost, edge_order, wkt])
    properties_folder = make_folder(join(target_folder, 'properties'))
    # Save CSV
    t = DataFrame(rows, columns=[
        'grid_mv_line_adjusted_length_in_meters',
        'grid_mv_line_discounted_cost',
        'grid_mv_network_connection_order',
        'wkt',
    ]).sort_values('grid_mv_network_connection_order')
    t_path = join(properties_folder, 'lines.csv')
    t.to_csv(t_path, index=False)
    # Save SHP
    save_shapefile(join(
        properties_folder, 'lines-proposed.shp.zip'), t)
    save_shapefile(join(
        properties_folder, 'lines-existing.shp.zip'), grid_mv_line_geotable)
def save_total_lines(target_folder, infrastructure_graph, grid_mv_line_geotable):
    """Write properties/lines.csv plus proposed and existing MV line
    shapefiles for every edge in infrastructure_graph."""
    rows = []
    for node1_id, node2_id, edge_d in infrastructure_graph.cycle_edges():
        node1_d = infrastructure_graph.node[node1_id]
        node2_d = infrastructure_graph.node[node2_id]
        edge_order = edge_d['grid_mv_network_connection_order']
        line_length = edge_d['grid_mv_line_adjusted_length_in_meters']
        discounted_cost = edge_d['grid_mv_line_discounted_cost']
        # order_nodes picks which endpoint comes first — presumably by
        # connection order; confirm against its definition
        node1_d, node2_d = order_nodes(node1_d, node2_d, edge_order)
        # WKT uses (x, y) = (longitude, latitude)
        wkt = LineString([(
            node1_d['longitude'], node1_d['latitude'],
        ), (
            node2_d['longitude'], node2_d['latitude'],
        )]).wkt
        rows.append([line_length, discounted_cost, edge_order, wkt])
    properties_folder = make_folder(join(target_folder, 'properties'))
    # Save CSV
    t = DataFrame(rows, columns=[
        'grid_mv_line_adjusted_length_in_meters',
        'grid_mv_line_discounted_cost',
        'grid_mv_network_connection_order',
        'wkt',
    ]).sort_values('grid_mv_network_connection_order')
    t_path = join(properties_folder, 'lines.csv')
    t.to_csv(t_path, index=False)
    # Save SHP
    save_shapefile(join(properties_folder, 'lines-proposed.shp.zip'), t)
    save_shapefile(join(properties_folder, 'lines-existing.shp.zip'),
                   grid_mv_line_geotable)
def sandbox(request):
    """Yield a temporary tree with internal/external files and symlinks,
    then remove it:

    source_folder_link -> source_folder
    source_folder/
        internal.txt
        internal_link -> internal.txt
        external_link -> external.txt
    external.txt
    """
    temporary_folder = mkdtemp()
    o = O()
    o.source_folder = make_folder(join(temporary_folder, 'source_folder'))
    # Create each file right after computing its path
    o.internal_path = join(o.source_folder, 'internal.txt')
    open(o.internal_path, 'wt').write('internal')
    o.external_path = join(temporary_folder, 'external.txt')
    open(o.external_path, 'wt').write('external')
    # Create each link right after computing its path
    o.internal_link_path = join(o.source_folder, 'internal_link')
    symlink(o.internal_path, o.internal_link_path)
    o.external_link_path = join(o.source_folder, 'external_link')
    symlink(o.external_path, o.external_link_path)
    o.source_folder_link_path = join(temporary_folder, 'source_folder_link')
    symlink(o.source_folder, o.source_folder_link_path)
    yield o
    rmtree(temporary_folder)
def save_total_summary_by_grid_mv_line(target_folder, infrastructure_graph):
    """Write reports/summary-by-grid-mv-line.csv with one row per
    proposed MV line, sorted by connection order."""
    column_names = [
        'Name',
        'Length (m)',
        'Discounted Cost',
        'Proposed MV Network Connection Order',
    ]
    rows = []
    for node1_id, node2_id, edge_d in infrastructure_graph.cycle_edges():
        node1_d = infrastructure_graph.node[node1_id]
        node2_d = infrastructure_graph.node[node2_id]
        edge_order = edge_d['grid_mv_network_connection_order']
        node1_d, node2_d = order_nodes(node1_d, node2_d, edge_order)
        line_name = 'From %s to %s' % (
            node1_d.get('name', 'the grid'),
            node2_d.get('name', 'the grid'))
        rows.append([
            line_name,
            edge_d['grid_mv_line_adjusted_length_in_meters'],
            edge_d['grid_mv_line_discounted_cost'],
            edge_order])
    t = DataFrame(rows, columns=column_names).sort_values(
        'Proposed MV Network Connection Order')
    reports_folder = make_folder(join(target_folder, 'reports'))
    t_path = join(reports_folder, 'summary-by-grid-mv-line.csv')
    t.to_csv(t_path, index=False)
    print('summary_by_grid_mv_line_table_path = %s' % t_path)
def assemble_total_grid_mv_line_network(
        target_folder, infrastructure_graph, grid_mv_line_geotable,
        grid_mv_network_minimum_point_count):
    """Run the networker to propose a grid MV line network and merge the
    resulting nodes and edges into infrastructure_graph.

    Returns {'infrastructure_graph': infrastructure_graph}.
    """
    drafts_folder = make_folder(join(target_folder, 'drafts'))
    node_table = get_table_from_graph(infrastructure_graph, [
        'longitude', 'latitude', 'grid_mv_line_adjusted_budget_in_meters'])
    node_table_path = join(drafts_folder, 'nodes-networker.csv')
    node_table.to_csv(node_table_path)
    nwk_settings = deepcopy(NETWORKER_SETTINGS)
    nwk_settings['demand_nodes']['filename'] = node_table_path
    nwk_settings['network_parameters'][
        'minimum_node_count'] = grid_mv_network_minimum_point_count
    if len(grid_mv_line_geotable):
        # Seed the networker with the existing grid at zero budget
        grid_mv_line_shapefile_path = join(
            drafts_folder, 'existing_grid_mv_line.shp')
        save_shapefile(grid_mv_line_shapefile_path, grid_mv_line_geotable)
        nwk_settings['existing_networks'] = {
            'filename': grid_mv_line_shapefile_path,
            'budget_value': 0}
    nwk = NetworkerRunner(nwk_settings, drafts_folder)
    nwk.validate()
    msf = nwk.run()
    for node_id in msf.nodes_iter():
        if node_id in infrastructure_graph:
            continue  # Add fake nodes so we can add edges to fake nodes
        longitude, latitude = msf.coords[node_id]
        infrastructure_graph.add_node(node_id, {
            'longitude': longitude, 'latitude': latitude,
            'population': 0, 'peak_demand_in_kw': 0})
    infrastructure_graph.add_edges_from(msf.edges_iter())
    return {'infrastructure_graph': infrastructure_graph}
def save_total_summary_by_grid_mv_line(target_folder, infrastructure_graph):
    """Summarize each proposed MV line (name, length, discounted cost,
    connection order) into reports/summary-by-grid-mv-line.csv."""
    rows = []
    for node1_id, node2_id, edge_d in infrastructure_graph.cycle_edges():
        node1_d = infrastructure_graph.node[node1_id]
        node2_d = infrastructure_graph.node[node2_id]
        edge_order = edge_d['grid_mv_network_connection_order']
        line_length = edge_d['grid_mv_line_adjusted_length_in_meters']
        discounted_cost = edge_d['grid_mv_line_discounted_cost']
        # order_nodes decides which endpoint is listed first —
        # presumably by connection order; confirm against its definition
        node1_d, node2_d = order_nodes(node1_d, node2_d, edge_order)
        name = 'From %s to %s' % (node1_d.get(
            'name', 'the grid'), node2_d.get('name', 'the grid'))
        rows.append([name, line_length, discounted_cost, edge_order])
    t = DataFrame(rows, columns=[
        'Name',
        'Length (m)',
        'Discounted Cost',
        'Proposed MV Network Connection Order',
    ]).sort_values('Proposed MV Network Connection Order')
    reports_folder = make_folder(join(target_folder, 'reports'))
    t_path = join(reports_folder, 'summary-by-grid-mv-line.csv')
    t.to_csv(t_path, index=False)
    print('summary_by_grid_mv_line_table_path = %s' % t_path)
def save_arguments(g, script_path, keys):
    """Save tool arguments under <target_folder>/arguments as JSON and
    CSV, copying each *_path file alongside them.

    Args:
        g: dictionary of arguments; may contain 'target_folder'
        script_path: script whose name seeds an enumerated folder when
            no target_folder is given
        keys: preferred key order for the saved dictionaries
    """
    d = g.copy()
    target_folder = d.pop('target_folder')
    if not target_folder:
        target_folder = make_enumerated_folder_for(script_path)
    arguments_folder = make_folder(join(target_folder, 'arguments'))
    # Migrate paths
    path_by_key = OrderedDict()
    for k, v in d.items():
        if not k.endswith('_path'):
            continue
        file_name = _get_argument_file_name(k, v)
        # Save a copy of each file
        shutil.copy(v, join(arguments_folder, file_name))
        # Make the reference point to the local copy
        path_by_key[k] = file_name
    d = sort_dictionary(d, keys)
    d.update(path_by_key)
    # Save global arguments as JSON (with-blocks close the handles the
    # original code leaked)
    with open(join(arguments_folder, 'arguments.json'), 'w') as target_file:
        json.dump(d, target_file, indent=4, separators=(',', ': '))
    # Save global arguments as CSV
    with open(join(arguments_folder, 'arguments.csv'), 'w') as target_file:
        csv_writer = csv.writer(target_file)
        for x in d.items():
            csv_writer.writerow(x)
def sequence_total_grid_mv_line_network(target_folder, infrastructure_graph):
    """Sequence grid MV connections and annotate nodes and edges with
    their connection order.

    Returns {'infrastructure_graph': graph}, or {} for an edgeless graph.
    """
    drafts_folder = make_folder(join(target_folder, 'drafts'))
    graph = infrastructure_graph
    if not graph.edges():
        return {}  # The network is empty and there is nothing to sequence
    node_table = get_table_from_graph(
        graph, ['longitude', 'latitude', 'population', 'peak_demand_in_kw'])
    # The sequencer expects X/Y coordinate column names
    node_table = node_table.rename(columns={'longitude': 'X', 'latitude': 'Y'})
    node_table_path = join(drafts_folder, 'nodes-sequencer.csv')
    node_table.to_csv(node_table_path)
    edge_shapefile_path = join(drafts_folder, 'edges.shp')
    nwp = NetworkPlan.from_files(edge_shapefile_path,
                                 node_table_path,
                                 prioritize='population',
                                 proj='+proj=longlat +datum=WGS84 +no_defs')
    model = Sequencer(nwp, 'peak.demand.in.kw')
    model.sequence()
    # Copy each node's far-sighted sequence rank back onto the graph
    order_series = model.output_frame['Sequence..Far.sighted.sequence']
    for index, order in order_series.iteritems():
        node_id = model.output_frame['Unnamed..0'][index]
        graph.node[node_id]['grid_mv_network_connection_order'] = order
    # An edge inherits the earlier of its two endpoint orders
    for node1_id, node2_id, edge_d in graph.cycle_edges():
        node1_d = infrastructure_graph.node[node1_id]
        node2_d = infrastructure_graph.node[node2_id]
        edge_d['grid_mv_network_connection_order'] = min(
            node1_d.get('grid_mv_network_connection_order', float('inf')),
            node2_d.get('grid_mv_network_connection_order', float('inf')))
    return {'infrastructure_graph': graph}
def get_database_url(d):
    """Build a database URL from configuration dictionary d.

    Falls back to sqlite when no dialect is configured; any other
    dialect requires username, password, host, port and name.

    Raises:
        InvisibleRoadsError: when a required credential is missing
    """
    try:
        dialect = d['database']['dialect']
    except KeyError:
        dialect = 'sqlite'
    if dialect == 'sqlite':
        # sqlite only needs a file path; make sure its folder exists
        path = get_database_path(d)
        make_folder(dirname(path))
        return f'{dialect}:///{path}'
    try:
        credentials = {key: get_value(d, 'database', key) for key in (
            'username', 'password', 'host', 'port', 'name')}
    except KeyError as e:
        raise InvisibleRoadsError(f'database {e} required for {dialect}')
    return (
        f'{dialect}://{credentials["username"]}:{credentials["password"]}'
        f'@{credentials["host"]}:{credentials["port"]}'
        f'/{credentials["name"]}')
def sort_music(source_folder, target_folder):
    """Copy music files from source_folder to their computed target
    paths, skipping empty files and known duplicates.

    Returns a dict of message counts describing what happened.
    """
    progress = Progress()
    for root_folder, folder_names, file_names in walk(source_folder):
        progress.file_count = len(file_names)
        for file_index, file_name in enumerate(file_names, 1):
            source_path = join(root_folder, file_name)
            # Skip empty files
            if getsize(source_path) == 0:
                continue
            try:
                target_path = get_target_path(source_path)
            except DuplicateError as error:
                # The exception carries the duplicate's path
                progress.show(file_index, 'skip', str(error))
                continue
            make_folder(dirname(target_path))
            progress.show(file_index, 'copy', target_path)
            # copy2 preserves file attributes
            copy2(source_path, target_path)
    if progress.file_count:
        print('')
    return dict(progress.count_by_message)
def sort_music(source_folder, target_folder):
    """Walk source_folder, copying each non-empty, non-duplicate music
    file to its computed target path; return message counts."""
    progress = Progress()
    for root_folder, folder_names, file_names in walk(source_folder):
        progress.file_count = len(file_names)
        for file_index, file_name in enumerate(file_names, 1):
            source_path = join(root_folder, file_name)
            if not getsize(source_path):
                # Skip empty files
                continue
            try:
                target_path = get_target_path(source_path)
            except DuplicateError as target_path:
                # The exception value is the duplicate's path
                progress.show(file_index, 'skip', str(target_path))
                continue
            make_folder(dirname(target_path))
            progress.show(file_index, 'copy', target_path)
            copy2(source_path, target_path)  # Preserve file attributes
    if progress.file_count:
        print('')
    return dict(progress.count_by_message)
def save_total_report_by_location(
        target_folder, infrastructure_graph, demand_point_table, **keywords):
    """Write reports/report-by-location.csv with one row per demand
    point, preserving demand_point_table's column order."""
    ls = [node_d for node_id, node_d in infrastructure_graph.cycle_nodes()]
    g = keywords
    reports_folder = make_folder(join(target_folder, 'reports'))
    # BASE_KEYS first, then the table's own extra columns, then SOME_KEYS
    extra_keys = [
        x for x in demand_point_table.columns
        if x not in BASE_KEYS + SOME_KEYS]
    t = get_table_from_variables(
        ls, g, keys=BASE_KEYS + extra_keys + SOME_KEYS)
    t.columns = [format_column_name(x) for x in t.columns]
    t.to_csv(join(reports_folder, 'report-by-location.csv'))
def save_total_report_by_location(target_folder, infrastructure_graph,
                                  demand_point_table, **keywords):
    """Save a per-location report CSV whose columns follow
    demand_point_table's order, bracketed by BASE_KEYS and SOME_KEYS."""
    # One variable dict per node; keywords serve as global variables
    ls = [node_d for node_id, node_d in infrastructure_graph.cycle_nodes()]
    g = keywords
    reports_folder = make_folder(join(target_folder, 'reports'))
    t = get_table_from_variables(ls, g, keys=BASE_KEYS + [
        x for x in demand_point_table.columns
        if x not in BASE_KEYS + SOME_KEYS
    ] + SOME_KEYS)
    t.columns = [format_column_name(x) for x in t.columns]
    t_path = join(reports_folder, 'report-by-location.csv')
    t.to_csv(t_path)
def run_script(tool_definition, result_arguments, result_folder,
               target_folder=None, environment=None):
    """Run a tool's command in its configuration folder, recording the
    arguments, output streams and properties under result_folder.

    Returns the result_properties dict (return code if nonzero,
    processed streams, execution time in seconds).
    """
    timestamp, environment = time.time(), environment or {}
    if 'target_folder' in tool_definition['argument_names']:
        # Default the target to <result_folder>/y when not given
        y = make_folder(abspath(target_folder or join(result_folder, 'y')))
        result_arguments = OrderedDict(result_arguments, target_folder=y)
    # Record
    result_configuration = ResultConfiguration(result_folder)
    result_configuration.save_tool_location(tool_definition)
    result_configuration.save_result_arguments(result_arguments, environment)
    # Run
    command_terms = split_arguments(
        render_command(tool_definition['command_template'],
                       result_arguments).replace('\n', ' '))
    result_properties = OrderedDict()
    try:
        with cd(tool_definition['configuration_folder']):
            command_process = subprocess.Popen(
                command_terms, stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
                env=merge_dictionaries(environment, SCRIPT_ENVIRONMENT))
    except OSError:
        standard_output, standard_error = None, 'Command not found'
    else:
        standard_output, standard_error = [
            x.rstrip().decode('utf-8')
            for x in command_process.communicate()]
        if command_process.returncode:
            result_properties['return_code'] = command_process.returncode
    # Save
    result_properties.update(_process_streams(
        standard_output, standard_error, result_folder, tool_definition))
    result_properties['execution_time_in_seconds'] = time.time() - timestamp
    result_configuration.save_result_properties(result_properties)
    result_configuration.save_result_script(tool_definition, result_arguments)
    if 'target_folder' in tool_definition['argument_names']:
        # Expose the target via a stable 'y' link inside result_folder
        link_path(join(result_folder, 'y'), result_arguments['target_folder'])
    return result_properties
def save_total_summary_by_technology(
        target_folder, discounted_cost_by_technology,
        levelized_cost_by_technology, count_by_technology):
    """Write reports/summary-by-technology.csv with discounted cost,
    levelized cost and count columns indexed by technology."""
    t = concat([
        Series(discounted_cost_by_technology),
        Series(levelized_cost_by_technology),
        Series(count_by_technology),
    ], axis=1)
    t.index = [format_technology(x) for x in t.index]
    # Name the index AFTER replacing it: assigning a list to t.index
    # creates a fresh Index whose name is None, so the original order
    # (name first, reassign second) silently dropped the 'Technology'
    # label from the CSV header
    t.index.name = 'Technology'
    t.columns = [
        'Discounted Cost',
        'Levelized Cost Per kWh Consumed',
        'Count',
    ]
    reports_folder = make_folder(join(target_folder, 'reports'))
    t_path = join(reports_folder, 'summary-by-technology.csv')
    t.to_csv(t_path)
    print('summary_by_technology_table_path = %s' % t_path)
def save_total_summary_by_technology(target_folder,
                                     discounted_cost_by_technology,
                                     levelized_cost_by_technology,
                                     count_by_technology):
    """Write reports/summary-by-technology.csv with cost and count
    columns per technology."""
    t = concat([
        Series(discounted_cost_by_technology),
        Series(levelized_cost_by_technology),
        Series(count_by_technology),
    ], axis=1)
    # NOTE(review): the index reassignment below replaces the Index
    # object, discarding the 'Technology' name set here — confirm
    # whether the index label is expected in the CSV header
    t.index.name = 'Technology'
    t.index = [format_technology(x) for x in t.index]
    t.columns = [
        'Discounted Cost',
        'Levelized Cost Per kWh Consumed',
        'Count',
    ]
    reports_folder = make_folder(join(target_folder, 'reports'))
    t_path = join(reports_folder, 'summary-by-technology.csv')
    t.to_csv(t_path)
    print('summary_by_technology_table_path = %s' % t_path)
def assemble_total_grid_mv_line_network(target_folder, infrastructure_graph,
                                        grid_mv_line_geotable,
                                        grid_mv_network_minimum_point_count):
    """Propose a grid MV line network with the networker and merge its
    nodes and edges into infrastructure_graph.

    Returns {'infrastructure_graph': infrastructure_graph}.
    """
    drafts_folder = make_folder(join(target_folder, 'drafts'))
    node_table = get_table_from_graph(
        infrastructure_graph,
        ['longitude', 'latitude', 'grid_mv_line_adjusted_budget_in_meters'])
    node_table_path = join(drafts_folder, 'nodes-networker.csv')
    node_table.to_csv(node_table_path)
    nwk_settings = deepcopy(NETWORKER_SETTINGS)
    nwk_settings['demand_nodes']['filename'] = node_table_path
    nwk_settings['network_parameters'][
        'minimum_node_count'] = grid_mv_network_minimum_point_count
    if len(grid_mv_line_geotable):
        # Seed the networker with the existing grid at zero budget
        grid_mv_line_shapefile_path = join(drafts_folder,
                                           'existing_grid_mv_line.shp')
        save_shapefile(grid_mv_line_shapefile_path, grid_mv_line_geotable)
        nwk_settings['existing_networks'] = {
            'filename': grid_mv_line_shapefile_path,
            'budget_value': 0
        }
    nwk = NetworkerRunner(nwk_settings, drafts_folder)
    nwk.validate()
    msf = nwk.run()
    # NOTE(review): msf.nodes() here but msf.edges_iter() below mixes
    # networkx 1.x and 2.x APIs — confirm the networkx version in use
    for node_id in msf.nodes():
        if node_id in infrastructure_graph:
            continue
        # Add fake nodes so we can add edges to fake nodes
        longitude, latitude = msf.coords[node_id]
        infrastructure_graph.add_node(
            node_id, **{
                'longitude': longitude,
                'latitude': latitude,
                'population': 0,
                'peak_demand_in_kw': 0
            })
    infrastructure_graph.add_edges_from(msf.edges_iter())
    return {'infrastructure_graph': infrastructure_graph}
def post(self):
    """Relaunch the preview tool server for the requested notebook and
    respond with its URL, or the server's error output on failure."""
    stop_servers()
    notebook_path = self.get_argument('notebook_path')
    tool_port = S['tool_port']
    process_arguments = [
        'crosscompute', 'serve', abspath(notebook_path),
        '--host', S['tool_host'],
        '--port', str(tool_port),
        '--base_url', S['tool_base_url'],
        '--without_browser', '--without_logging', '--with_debugging']
    for x in 'brand_url', 'website_name', 'website_owner':
        y = expect_variable(x, '')
        if y:
            process_arguments.extend(('--' + x, y))
    # Record the exact launch command for debugging, quoting arguments
    # that contain spaces
    open(join(
        make_folder(DEBUGGING_FOLDER), 'preview-tool.sh',
    ), 'wt').write(' '.join(
        ('"%s"' % x if ' ' in x else x) for x in process_arguments))
    process = Popen(process_arguments, stderr=PIPE)
    d = {}
    # Poll for up to ~10 seconds until the tool answers or its process
    # exits with an error
    for x in range(10):
        try:
            requests.get('http://127.0.0.1:%s' % tool_port)
        except ConnectionError:
            sleep(1)
        else:
            status_code = 200
            d['tool_url'] = self._get_tool_url()
            break
        if process.poll():
            status_code = 400
            d['text'] = unicode_safely(process.stderr.read().strip())
            break
    else:
        # No attempt succeeded within the timeout
        status_code = 400
    self.set_status(status_code)
    self.write(d)
def save_total_summary_by_location(
        target_folder, infrastructure_graph, selected_technologies):
    """Write reports/summary-by-location.csv: each location's levelized
    cost per candidate technology plus the proposed technology."""
    technology_columns = [format_technology(x) for x in selected_technologies]
    rows = []
    for node_id, node_d in infrastructure_graph.cycle_nodes():
        row = [
            node_d['name'],
            node_d.get('grid_mv_network_connection_order', '')]
        for technology in selected_technologies:
            row.append(node_d[
                technology + '_local_levelized_cost_per_kwh_consumed'])
        row.append(format_technology(node_d['proposed_technology']))
        rows.append(row)
    t = DataFrame(rows, columns=[
        'Name',
        'Proposed MV Network Connection Order',
    ] + technology_columns + [
        'Proposed Technology',
    ]).sort_values('Proposed MV Network Connection Order')
    reports_folder = make_folder(join(target_folder, 'reports'))
    t_path = join(reports_folder, 'summary-by-location.csv')
    t.to_csv(t_path, index=False)
    print('summary_by_location_table_path = %s' % t_path)
def save_total_summary_by_location(target_folder, infrastructure_graph,
                                   selected_technologies):
    """Summarize per-location technology costs and the proposed choice
    into reports/summary-by-location.csv."""
    rows = []
    for node_id, node_d in infrastructure_graph.cycle_nodes():
        xs = [
            node_d['name'],
            node_d.get('grid_mv_network_connection_order', '')
        ]
        # One levelized-cost column per candidate technology
        xs.extend(node_d[x + '_local_levelized_cost_per_kwh_consumed']
                  for x in selected_technologies)
        xs.append(format_technology(node_d['proposed_technology']))
        rows.append(xs)
    t = DataFrame(rows, columns=[
        'Name',
        'Proposed MV Network Connection Order',
    ] + [format_technology(x) for x in selected_technologies] + [
        'Proposed Technology',
    ]).sort_values('Proposed MV Network Connection Order')
    reports_folder = make_folder(join(target_folder, 'reports'))
    t_path = join(reports_folder, 'summary-by-location.csv')
    t.to_csv(t_path, index=False)
    print('summary_by_location_table_path = %s' % t_path)
def run_script(
        tool_definition, result_arguments, result_folder,
        target_folder=None, environment=None):
    """Execute a tool command inside its configuration folder, saving
    arguments, captured streams and run properties under result_folder.

    Returns the result_properties dict.
    """
    timestamp, environment = time.time(), environment or {}
    if 'target_folder' in tool_definition['argument_names']:
        # Default the target to <result_folder>/y when not given
        y = make_folder(abspath(target_folder or join(result_folder, 'y')))
        result_arguments = OrderedDict(result_arguments, target_folder=y)
    # Record
    result_configuration = ResultConfiguration(result_folder)
    result_configuration.save_tool_location(tool_definition)
    result_configuration.save_result_arguments(result_arguments, environment)
    # Run
    command_terms = split_arguments(render_command(tool_definition[
        'command_template'], result_arguments).replace('\n', ' '))
    result_properties = OrderedDict()
    try:
        with cd(tool_definition['configuration_folder']):
            command_process = subprocess.Popen(
                command_terms, stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
                env=merge_dictionaries(environment, SCRIPT_ENVIRONMENT))
    except OSError:
        standard_output, standard_error = None, 'Command not found'
    else:
        standard_output, standard_error = [x.rstrip().decode(
            'utf-8') for x in command_process.communicate()]
        if command_process.returncode:
            result_properties['return_code'] = command_process.returncode
    # Save
    result_properties.update(_process_streams(
        standard_output, standard_error, result_folder, tool_definition))
    result_properties['execution_time_in_seconds'] = time.time() - timestamp
    result_configuration.save_result_properties(result_properties)
    result_configuration.save_result_script(tool_definition, result_arguments)
    if 'target_folder' in tool_definition['argument_names']:
        # Expose the target via a stable 'y' link inside result_folder
        link_path(join(result_folder, 'y'), result_arguments['target_folder'])
    return result_properties
def test_accept_upload_id(self, result_request, tool_definition, data_folder):
    """Check upload_id handling for an x_path argument: reject unknown ids
    and ids whose upload lacks the expected data_type file; accept an id
    whose upload folder contains the typed file and expose its contents.
    """
    tool_definition['argument_names'] = ('x_path', )
    # Prepare upload_folder
    upload = Upload(id='xyz', owner_id=0)
    upload_folder = make_folder(upload.get_folder(data_folder))
    # Close handles explicitly instead of leaking them to the GC
    open(join(upload_folder, 'raw.txt'), 'wt').close()
    open(join(upload_folder, 'name.txt'), 'wt').close()
    # Use bad upload_id
    raw_arguments = {'x': 'a'}
    with raises(HTTPBadRequest) as e:
        result_request.prepare_arguments(tool_definition, raw_arguments)
    assert e.value.detail['x'] == 'invalid'
    # Use upload_id that does not have expected data_type
    raw_arguments = {'x': 'xyz'}
    with raises(HTTPBadRequest) as e:
        result_request.prepare_arguments(tool_definition, raw_arguments)
    assert e.value.detail['x'] == 'invalid'
    # Use upload_id that has expected data_type
    file_name = StringType.get_file_name()
    with open(join(upload_folder, file_name), 'wt') as f:
        f.write('whee')
    result = result_request.prepare_arguments(tool_definition, raw_arguments)
    with open(result.arguments['x_path']) as f:
        assert f.read() == 'whee'
def test_accept_upload_id(
        self, result_request, tool_definition, data_folder):
    """Verify that upload_id arguments are validated and resolved to the
    uploaded file's contents."""
    tool_definition['argument_names'] = ('x_path',)
    # Prepare upload_folder
    upload = Upload(id='xyz', owner_id=0)
    upload_folder = make_folder(upload.get_folder(data_folder))
    for name in ('raw.txt', 'name.txt'):
        open(join(upload_folder, name), 'wt')
    # Use bad upload_id
    raw_arguments = {'x': 'a'}
    with raises(HTTPBadRequest) as excinfo:
        result_request.prepare_arguments(tool_definition, raw_arguments)
    assert excinfo.value.detail['x'] == 'invalid'
    # Use upload_id that does not have expected data_type
    raw_arguments = {'x': 'xyz'}
    with raises(HTTPBadRequest) as excinfo:
        result_request.prepare_arguments(tool_definition, raw_arguments)
    assert excinfo.value.detail['x'] == 'invalid'
    # Use upload_id that has expected data_type
    typed_file_name = StringType.get_file_name()
    open(join(upload_folder, typed_file_name), 'wt').write('whee')
    result = result_request.prepare_arguments(
        tool_definition, raw_arguments)
    assert open(result.arguments['x_path']).read() == 'whee'
import matplotlib matplotlib.use('Agg') # Prevent no $DISPLAY environment variable warning from invisibleroads_macros.disk import make_folder from matplotlib import pyplot as plt from os.path import join from sys import argv target_folder = make_folder(argv[1]) # Render integer print('an_integer = 100') # Render table target_path = join(target_folder, 'a.csv') open(target_path, 'w').write("""\ a,b,c 1,2,3 4,5,6 7,8,9""") print('a_table_path = ' + target_path) # Render image target_path = join(target_folder, 'a.png') figure = plt.figure() plt.plot([1, 2, 3], [1, 2, 2]) figure.savefig(target_path) print('an_image_path = ' + target_path) # Render geotable (map) target_path = join(target_folder, 'b.csv') open(target_path, 'w').write("""\ Latitude,Longitude,Description 27.3364347,-82.5306527,A 27.3364347,-82.5306527,B
import datetime import shutil from flask import Flask, render_template, request, send_from_directory from invisibleroads_macros import disk, security from os.path import basename, join from tempfile import mkdtemp from run import parse_date from run import run as run_script app = Flask(__name__) results_folder = disk.make_folder('results') @app.route('/') def index(): return render_template('index.html') @app.route('/run', methods=['GET', 'POST']) def run(): target_folder = mkdtemp() journal_names = sorted(set( request.form.get('journal_names', '').splitlines())) text_terms = sorted(set( request.form.get('text_terms', '').splitlines())) mesh_terms = sorted(set( request.form.get('mesh_terms', '').splitlines())) custom_expression = request.form.get('custom_expression', '') try:
def prepare_result_response_folder(data_folder):
    """Allocate the next enumerated result folder under data_folder/results
    and return (result_id, path of its freshly created response subfolder).
    """
    result_folder = make_enumerated_folder(join(data_folder, 'results'))
    response_folder = make_folder(join(result_folder, 'response'))
    return basename(result_folder), response_folder
import re
from invisibleroads_macros.disk import make_folder
from os.path import basename, join
from shutil import copy
from sys import argv


TARGET_FOLDER = make_folder(argv[1])
EXAMPLE_PATTERN = re.compile(r'_example.*')


def prepare_map(map_name, show_table=True):
    """Copy map_name's source CSV into TARGET_FOLDER and print the variable
    assignments that expose it as a table (optional) and a geotable."""
    source_path = EXAMPLE_PATTERN.sub('', map_name) + '.csv'
    target_path = join(TARGET_FOLDER, basename(source_path))
    copy(source_path, target_path)
    if show_table:
        print('%s_table_path = %s' % (map_name, target_path))
    print('%s_geotable_path = %s' % (map_name, target_path))


for map_name, show_table in [
        ('geometry_point_example', True),
        ('geometry_point_example_dark', False),
        ('geometry_point_example_streets_satellite', False),
        ('geometry_wkt_example', True),
        ('radius_pixel_example', True),
        ('radius_pixel_range_example', True),
        ('radius_pixel_mean_example', True),
        ('radius_pixel_sum_example', True),
        ('specific_color_example', True)]:
    prepare_map(map_name, show_table)
import csv
from collections import Counter
from invisibleroads_macros.disk import make_folder
from invisibleroads_macros.text import compact_whitespace
from os.path import join
from sys import argv

target_folder, text_path = argv[1:]
# Collapse runs of whitespace, then count every character in the text
character_counter = Counter(compact_whitespace(open(text_path).read()))
# Drop the space count; pop() avoids KeyError when the text has no spaces
character_counter.pop(' ', None)
target_path = join(make_folder(target_folder), 'character_count.csv')
csv_writer = csv.writer(open(target_path, 'w'))
# One (character, count) row per character, most frequent first
csv_writer.writerows(character_counter.most_common())
print('character_count_table_path = ' + target_path)
import csv
from geopy import GoogleV3
from invisibleroads_macros.disk import make_folder
from os.path import join
from sys import argv

target_folder, address_text_path = argv[1:]
geocode = GoogleV3().geocode
location_table_path = join(make_folder(target_folder), 'locations.csv')
with open(address_text_path) as address_file, \
        open(location_table_path, 'w') as location_table_file:
    csv_writer = csv.writer(location_table_file)
    csv_writer.writerow(['Address', 'Latitude', 'Longitude'])
    for line in address_file:
        # Strip the trailing newline BEFORE geocoding (the original geocoded
        # the raw line) and skip blank lines instead of geocoding them
        address = line.strip()
        if not address:
            continue
        location = geocode(address)
        csv_writer.writerow([address, location.latitude, location.longitude])
print('location_table_path = ' + location_table_path)
import datetime import shutil from flask import Flask, render_template, request, send_from_directory from invisibleroads_macros.disk import compress, make_folder from invisibleroads_macros.security import make_random_string from invisibleroads_macros.text import remove_punctuation from os.path import basename, join from tempfile import mkdtemp from run import parse_date from run import run as run_script app = Flask(__name__) results_folder = make_folder('results') @app.route('/') def index(): return render_template('index.html') @app.route('/run', methods=['GET', 'POST']) def run(): target_folder = mkdtemp() journal_names = load_terms(request.form.get('journal_names', '')) author_names = load_terms(request.form.get('author_names', '')) text_terms = load_terms(request.form.get('text_terms', '')) mesh_terms = load_terms(request.form.get('mesh_terms', '')) custom_expression = request.form.get('custom_expression', '') try:
import re
from invisibleroads_macros.disk import make_folder
from os.path import basename, join
from shutil import copy
from sys import argv


TARGET_FOLDER = make_folder(argv[1])
EXAMPLE_PATTERN = re.compile(r'_example.*')


def prepare_map(map_name, show_table=True):
    """Copy the CSV backing map_name into TARGET_FOLDER, then print the
    assignments that render it as a table (optional) and a geotable."""
    csv_name = EXAMPLE_PATTERN.sub('', map_name) + '.csv'
    destination_path = join(TARGET_FOLDER, basename(csv_name))
    copy(csv_name, destination_path)
    if show_table:
        print('%s_table_path = %s' % (map_name, destination_path))
    print('%s_geotable_path = %s' % (map_name, destination_path))


prepare_map('geometry_point_example')
prepare_map('geometry_point_example_dark', show_table=False)
prepare_map('geometry_point_example_streets_satellite', show_table=False)
prepare_map('geometry_wkt_example')
prepare_map('radius_meter_example')
prepare_map('radius_pixel_example')
prepare_map('radius_pixel_range_example')
prepare_map('radius_pixel_mean_example')
prepare_map('radius_pixel_sum_example')