def markers():
    """Serve accident and discussion markers for the requested bounding box.

    Query parameters are read via get_kwargs(); the response is CSV when the
    request asks for format=csv, otherwise JSON.
    """
    logging.debug('getting markers')
    kwargs = get_kwargs()
    # Lazy %-args: the kwargs dict is only formatted when DEBUG is enabled.
    logging.debug('querying markers in bounding box: %s', kwargs)
    # Below the minimal zoom only lightweight ("thin") markers are needed.
    is_thin = (kwargs['zoom'] < CONST.MINIMAL_ZOOM)
    accidents = Marker.bounding_box_query(is_thin, yield_per=50,
                                          involved_and_vehicles=False,
                                          **kwargs)
    discussion_args = ('ne_lat', 'ne_lng', 'sw_lat', 'sw_lng',
                       'show_discussions')
    discussions = DiscussionMarker.bounding_box_query(
        **{arg: kwargs[arg] for arg in discussion_args})
    if request.values.get('format') == 'csv':
        date_format = '%Y-%m-%d'
        return Response(
            generate_csv(accidents),
            headers={
                "Content-Type": "text/csv",
                "Content-Disposition":
                    'attachment; '
                    'filename="Anyway-accidents-from-{0}-to-{1}.csv"'.format(
                        kwargs["start_date"].strftime(date_format),
                        kwargs["end_date"].strftime(date_format))
            })
    else:  # defaults to json
        return generate_json(accidents, discussions, is_thin)
def retrieve_clusters(ne_lat, ne_lng, sw_lat, sw_lng, start_date, end_date,
                      fatal, severe, light, inaccurate, zoom):
    """Divide the bounding box into sub-boxes, cluster each box's markers on
    a thread pool, and return the combined list of clusters.

    One worker per CPU core; each worker queries its sub-box and runs
    calculate_clusters on the result.
    """
    marker_boxes = divide_to_boxes(ne_lat, ne_lng, sw_lat, sw_lng)
    result_futures = []
    # Lazy %-args instead of eager string concatenation.
    logging.info("number of cores: %s", multiprocessing.cpu_count())
    with concurrent.futures.ThreadPoolExecutor(
            max_workers=multiprocessing.cpu_count()) as executor:
        for marker_box in marker_boxes:
            # Unpack the box corners instead of indexing [0]..[3].
            box_ne_lat, box_ne_lng, box_sw_lat, box_sw_lng = marker_box[:4]
            markers_in_box = Marker.bounding_box_query(
                box_ne_lat, box_ne_lng, box_sw_lat, box_sw_lng,
                start_date, end_date, fatal, severe, light, inaccurate,
            ).all()
            result_futures.append(
                executor.submit(calculate_clusters, markers_in_box, zoom))
    completed_futures = concurrent.futures.wait(result_futures)
    result = []
    for future in completed_futures.done:
        result.extend(future.result())
    return result
def charts_data():
    """Return accidents, vehicles and involved records for the current
    bounding box as a single JSON payload."""
    logging.debug('getting charts data')
    kwargs = get_kwargs()
    accidents, vehicles, involved = Marker.bounding_box_query(
        is_thin=False, yield_per=50, involved_and_vehicles=True, **kwargs)
    payload = {
        'accidents': [acc.serialize() for acc in accidents],
        'vehicles': [vehicles_data_refinement(veh.serialize())
                     for veh in vehicles],
        'involved': [involved_data_refinement(inv.serialize())
                     for inv in involved],
    }
    return Response(json.dumps(payload), mimetype="application/json")
def setUp(self):
    """Build a bounding-box query over a fixed set of filter arguments."""
    kwargs = {'approx': True, 'show_day': 7, 'show_discussions': True,
              'accurate': True, 'surface': 0, 'weather': 0, 'district': 0,
              'show_markers': True, 'show_fatal': True, 'show_time': 24,
              'show_intersection': 3, 'show_light': True,
              'sw_lat': 32.06711066128336, 'controlmeasure': 0,
              'ne_lng': 34.799307929669226, 'show_severe': True,
              'start_time': 25, 'acctype': 0, 'separation': 0,
              'show_urban': 3, 'show_lane': 3, 'sw_lng': 34.78879367033085,
              'zoom': 17, 'show_holiday': 0, 'end_time': 25, 'road': 0,
              'ne_lat': 32.07254745790576, 'start_date': "01/01/2014",
              'end_date': "01/01/2015"}
    self.query = Marker.bounding_box_query(yield_per=50, **kwargs)
    # print as a function call so this also parses under Python 3; for a
    # single argument the output is identical under Python 2.
    print(self.query)
def setUp(self):
    """Prepare a reusable bounding-box query and remember its arguments."""
    query_args = {
        'approx': True, 'accurate': True,
        'show_day': 7, 'show_discussions': True, 'show_markers': True,
        'show_fatal': True, 'show_severe': True, 'show_light': True,
        'show_time': 24, 'show_intersection': 3, 'show_urban': 3,
        'show_lane': 3, 'show_holiday': 0,
        'surface': 0, 'weather': 0, 'district': 0, 'controlmeasure': 0,
        'acctype': 0, 'separation': 0, 'road': 0,
        'start_time': 25, 'end_time': 25, 'zoom': 17,
        'sw_lat': 32.067363446951944, 'sw_lng': 34.78877537033077,
        'ne_lat': 32.072427482938345, 'ne_lng': 34.79928962966915,
        'start_date': datetime.date(2014, 1, 1),
        'end_date': datetime.date(2016, 1, 1),
    }
    # Keep the arguments so individual tests can copy and tweak them.
    self.query_args = query_args
    self.query = Marker.bounding_box_query(yield_per=50, **query_args)
def setUp(self):
    """Query markers for a fixed bounding box: severe and light accidents
    included, fatal and inaccurate excluded."""
    query_kwargs = dict(
        ne_lat=32.36, ne_lng=35.088,
        sw_lat=32.292, sw_lng=34.884,
        start_date=start_date, end_date=end_date,
        fatal=False, severe=True, light=True,
        inaccurate=False, is_thin=False, yield_per=None,
    )
    self.query = Marker.bounding_box_query(**query_kwargs)
def retrieve_clusters(**kwargs):
    """Cluster markers for the requested bounding box, one thread-pool task
    per sub-box, and return the combined cluster list."""
    marker_boxes = divide_to_boxes(kwargs["ne_lat"], kwargs["ne_lng"],
                                   kwargs["sw_lat"], kwargs["sw_lng"])
    result_futures = []
    # Lazy %-args instead of eager string concatenation.
    logging.info("number of cores: %s", multiprocessing.cpu_count())
    with concurrent.futures.ThreadPoolExecutor(
            max_workers=multiprocessing.cpu_count()) as executor:
        for marker_box in marker_boxes:
            # kwargs is this function's own dict (built by **kwargs), so
            # updating it in place does not affect the caller; each box
            # overwrites the previous box's coordinates.
            kwargs.update(marker_box)
            markers_in_box = Marker.bounding_box_query(**kwargs).all()
            result_futures.append(
                executor.submit(calculate_clusters, markers_in_box,
                                kwargs["zoom"]))
    completed_futures = concurrent.futures.wait(result_futures)
    result = []
    for future in completed_futures.done:
        result.extend(future.result())
    return result
def retrieve_clusters(**kwargs):
    """Compute marker clusters for a bounding box, one worker per sub-box."""
    boxes = divide_to_boxes(kwargs['ne_lat'], kwargs['ne_lng'],
                            kwargs['sw_lat'], kwargs['sw_lng'])
    logging.info('number of cores: ' + str(multiprocessing.cpu_count()))
    futures = []
    with concurrent.futures.ThreadPoolExecutor(
            max_workers=multiprocessing.cpu_count()) as pool:
        for box in boxes:
            # Overlay this sub-box's coordinates on the query arguments.
            kwargs.update(box)
            box_markers = Marker.bounding_box_query(**kwargs).all()
            futures.append(
                pool.submit(calculate_clusters, box_markers, kwargs['zoom']))
    done_futures = concurrent.futures.wait(futures).done
    clusters = []
    for fut in done_futures:
        clusters.extend(fut.result())
    return clusters
def charts_data():
    """Serve serialized accidents, vehicles and involved rows as JSON."""
    logging.debug('getting charts data')
    kwargs = get_kwargs()
    accidents, vehicles, involved = Marker.bounding_box_query(
        is_thin=False, yield_per=50, involved_and_vehicles=True, **kwargs)
    serialized = {
        'accidents': [a.serialize() for a in accidents],
        'vehicles': [vehicles_data_refinement(v.serialize())
                     for v in vehicles],
        'involved': [involved_data_refinement(i.serialize())
                     for i in involved],
    }
    return Response(json.dumps(serialized), mimetype="application/json")
def retrieve_clusters(ne_lat, ne_lng, sw_lat, sw_lng, start_date, end_date,
                      fatal, severe, light, inaccurate, zoom):
    """Split the bounding box into sub-boxes, cluster each sub-box's markers
    on a thread pool, and return the merged cluster list."""
    marker_boxes = divide_to_boxes(ne_lat, ne_lng, sw_lat, sw_lng)
    result_futures = []
    # Lazy %-args instead of eager string concatenation.
    logging.info('number of cores: %s', multiprocessing.cpu_count())
    with concurrent.futures.ThreadPoolExecutor(
            max_workers=multiprocessing.cpu_count()) as executor:
        for marker_box in marker_boxes:
            markers_in_box = Marker.bounding_box_query(
                marker_box[0], marker_box[1], marker_box[2], marker_box[3],
                start_date, end_date, fatal, severe, light,
                inaccurate).all()
            result_futures.append(
                executor.submit(calculate_clusters, markers_in_box, zoom))
    completed_futures = concurrent.futures.wait(result_futures)
    result = []
    for future in completed_futures.done:
        result.extend(future.result())
    return result
def markers():
    """Serve accident and discussion markers for the requested bounding box
    as CSV (when format=csv is requested) or JSON."""
    logging.debug('getting markers')
    kwargs = get_kwargs()
    # Lazy %-args: format the kwargs dict only when DEBUG logging is on.
    logging.debug('querying markers in bounding box: %s', kwargs)
    # Below the minimal zoom only lightweight ("thin") markers are needed.
    is_thin = (kwargs['zoom'] < CONST.MINIMAL_ZOOM)
    accidents = Marker.bounding_box_query(is_thin, yield_per=50,
                                          involved_and_vehicles=False,
                                          **kwargs)
    discussion_args = ('ne_lat', 'ne_lng', 'sw_lat', 'sw_lng',
                       'show_discussions')
    discussions = DiscussionMarker.bounding_box_query(
        **{arg: kwargs[arg] for arg in discussion_args})
    if request.values.get('format') == 'csv':
        date_format = '%Y-%m-%d'
        return Response(generate_csv(accidents), headers={
            "Content-Type": "text/csv",
            "Content-Disposition":
                'attachment; '
                'filename="Anyway-accidents-from-{0}-to-{1}.csv"'
                .format(kwargs["start_date"].strftime(date_format),
                        kwargs["end_date"].strftime(date_format))
        })
    else:  # defaults to json
        return generate_json(accidents, discussions, is_thin)
def retrieve_clusters(ne_lat, ne_lng, sw_lat, sw_lng, start_date, end_date,
                      fatal, severe, light, inaccurate, zoom):
    """Query all markers in the bounding box and return their clusters JSON."""
    start_time = time.time()
    filtered_markers = Marker.bounding_box_query(
        ne_lat, ne_lng, sw_lat, sw_lng, start_date, end_date,
        fatal, severe, light, inaccurate).all()
    # Use the logging module (as the rest of this code does) instead of a
    # bare print, with lazy %-formatting of the elapsed time.
    logging.debug('bounding_box_query took %s', time.time() - start_time)
    return generate_clusters_json(filtered_markers, zoom)
def test_light_severity_filter(self):
    """Disabling show_light must exclude light-severity (3) markers."""
    kwargs = self.query_args.copy()
    kwargs['show_light'] = False
    markers = Marker.bounding_box_query(yield_per=50, **kwargs)
    for marker in markers:
        # assertNotEqual reports both compared values on failure, unlike
        # assertTrue(a != b) which only says the expression was falsy.
        self.assertNotEqual(marker.severity, 3)
def test_approx_filter(self):
    """Disabling 'accurate' must exclude markers with locationAccuracy 1."""
    kwargs = self.query_args.copy()
    kwargs['accurate'] = False
    markers = Marker.bounding_box_query(yield_per=50, **kwargs)
    for marker in markers:
        # assertNotEqual reports both compared values on failure, unlike
        # assertTrue(a != b) which only says the expression was falsy.
        self.assertNotEqual(marker.locationAccuracy, 1)