Example #1
    def great_circle_binning(sta_evs, bin_data_buffer, bin_data_shape, lock,
                             counter):
        new_bins = GreatCircleBinner(domain.min_latitude, domain.max_latitude,
                                     lat_lng_count, domain.min_longitude,
                                     domain.max_longitude, lat_lng_count)
        for event, station in sta_evs:
            with lock:
                counter.value += 1
            if not counter.value % 25:
                pbar.update(counter.value)
            new_bins.add_greatcircle(event, station)

        bin_data = to_numpy(bin_data_buffer, np.uint32, bin_data_shape)
        with bin_data_buffer.get_lock():
            bin_data += new_bins.bins
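
Note: this snippet (like Example #2 below) is only the worker closure; `np`, `lat_lng_count`, `pbar`, `to_numpy`, and the domain bounds are captured from the enclosing `plot_raydensity`, which the later examples show in full. The load-bearing piece is the shared result buffer each worker accumulates into. A minimal standalone sketch of that pattern, with made-up sizes:

import ctypes as C
import multiprocessing

import numpy as np

def to_numpy(raw_array, dtype, shape):
    # Reinterpret the shared ctypes buffer as a typed numpy view; no copy.
    data = np.frombuffer(raw_array.get_obj())
    data.dtype = dtype
    return data.reshape(shape)

shape = (4, 4)
shared = multiprocessing.Array(C.c_uint32, shape[0] * shape[1])
bins = to_numpy(shared, np.uint32, shape)
with shared.get_lock():  # the Array's own lock guards the accumulation
    bins += np.ones(shape, dtype=np.uint32)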
Example #2
    def great_circle_binning(sta_evs, bin_data_buffer, bin_data_shape,
                             lock, counter):
        new_bins = GreatCircleBinner(
            bounds["minimum_latitude"], bounds["maximum_latitude"],
            lat_lng_count, bounds["minimum_longitude"],
            bounds["maximum_longitude"], lat_lng_count)
        for event, station in sta_evs:
            with lock:
                counter.value += 1
            if not counter.value % 25:
                pbar.update(counter.value)
            new_bins.add_greatcircle(event, station)

        bin_data = to_numpy(bin_data_buffer, np.uint32, bin_data_shape)
        with bin_data_buffer.get_lock():
            bin_data += new_bins.bins
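
The other shared state in this closure is the progress counter: a `multiprocessing.Value` incremented under a `Lock` so the parent's progress bar can observe work done in child processes. A stripped-down sketch of just that mechanism (hypothetical worker, no binning):

import multiprocessing

def worker(items, lock, counter):
    # Hypothetical stand-in for great_circle_binning: only the counting.
    for _ in items:
        with lock:
            counter.value += 1

if __name__ == "__main__":
    lock = multiprocessing.Lock()
    counter = multiprocessing.Value("i", 0)
    jobs = [multiprocessing.Process(target=worker,
                                    args=(range(100), lock, counter))
            for _ in range(4)]
    for job in jobs:
        job.start()
    for job in jobs:
        job.join()
    print(counter.value)  # 400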
Example #3
def plot_raydensity(map_object, station_events, min_lat, max_lat, min_lng,
                    max_lng, rot_axis, rot_angle):
    """
    Create a ray-density plot for all events and all stations.

    This function is potentially expensive and will use all CPUs available.
    Does require geographiclib to be installed.
    """
    import ctypes as C
    from lasif.tools.great_circle_binner import GreatCircleBinner
    from lasif.utils import Point
    import multiprocessing
    import progressbar
    from scipy.stats import scoreatpercentile

    bounds = rotations.get_max_extention_of_domain(
        min_lat, max_lat, min_lng, max_lng, rotation_axis=rot_axis,
        rotation_angle_in_degree=rot_angle)

    # Merge everything so that a list with coordinate pairs is created. This
    # list is then distributed among all processors.
    station_event_list = []
    for event, stations in station_events:
        e_point = Point(event["latitude"], event["longitude"])
        for station in stations.values():
            station_event_list.append((e_point, Point(station["latitude"],
                                                      station["longitude"])))

    circle_count = len(station_event_list)

    # The granularity of the latitude/longitude discretization for the
    # raypaths. Attempt to get a somewhat meaningful result in any case.
    if circle_count < 1000:
        lat_lng_count = 1000
    elif circle_count < 10000:
        lat_lng_count = 2000
    else:
        lat_lng_count = 3000

    cpu_count = multiprocessing.cpu_count()

    def to_numpy(raw_array, dtype, shape):
        data = np.frombuffer(raw_array.get_obj())
        data.dtype = dtype
        return data.reshape(shape)

    print "\nLaunching %i greatcircle calculations on %i CPUs..." % \
        (circle_count, cpu_count)

    widgets = ["Progress: ", progressbar.Percentage(),
               progressbar.Bar(), "", progressbar.ETA()]
    pbar = progressbar.ProgressBar(widgets=widgets,
                                   maxval=circle_count).start()

    def great_circle_binning(sta_evs, bin_data_buffer, bin_data_shape,
                             lock, counter):
        new_bins = GreatCircleBinner(
            bounds["minimum_latitude"], bounds["maximum_latitude"],
            lat_lng_count, bounds["minimum_longitude"],
            bounds["maximum_longitude"], lat_lng_count)
        for event, station in sta_evs:
            with lock:
                counter.value += 1
            if not counter.value % 25:
                pbar.update(counter.value)
            new_bins.add_greatcircle(event, station)

        bin_data = to_numpy(bin_data_buffer, np.uint32, bin_data_shape)
        with bin_data_buffer.get_lock():
            bin_data += new_bins.bins

    # Split the data in cpu_count parts.
    def chunk(seq, num):
        avg = len(seq) / float(num)
        out = []
        last = 0.0
        while last < len(seq):
            out.append(seq[int(last):int(last + avg)])
            last += avg
        return out
    chunks = chunk(station_event_list, cpu_count)

    # One instance that collects everything.
    collected_bins = GreatCircleBinner(
        bounds["minimum_latitude"], bounds["maximum_latitude"], lat_lng_count,
        bounds["minimum_longitude"], bounds["maximum_longitude"],
        lat_lng_count)

    # Use a multiprocessing shared memory array and map it to a numpy view.
    collected_bins_data = multiprocessing.Array(C.c_uint32,
                                                collected_bins.bins.size)
    collected_bins.bins = to_numpy(collected_bins_data, np.uint32,
                                   collected_bins.bins.shape)

    # Create, launch and join one process per CPU. Use a shared value as a
    # counter and a lock to avoid race conditions.
    processes = []
    lock = multiprocessing.Lock()
    counter = multiprocessing.Value("i", 0)
    for _i in range(cpu_count):
        processes.append(multiprocessing.Process(
            target=great_circle_binning, args=(chunks[_i], collected_bins_data,
                                               collected_bins.bins.shape, lock,
                                               counter)))
    for process in processes:
        process.start()
    for process in processes:
        process.join()

    pbar.finish()

    title = "%i Events with %i recorded 3 component waveforms" % (
        len(station_events), circle_count)
    # plt.gca().set_title(title, size="large")
    plt.title(title, size="xx-large")

    data = collected_bins.bins.transpose()

    if data.max() >= 10:
        data = np.log10(data)
        data += 0.1
        data[np.isinf(data)] = 0.0
        max_val = scoreatpercentile(data.ravel(), 99)
    else:
        max_val = data.max()

    cmap = cm.get_cmap("gist_heat")
    cmap._init()
    cmap._lut[:120, -1] = np.linspace(0, 1.0, 120) ** 2

    # Slightly change the appearance of the map so it suits the rays.
    map_object.drawmapboundary(fill_color='#bbbbbb')
    map_object.fillcontinents(color='#dddddd', lake_color='#dddddd', zorder=0)

    lngs, lats = collected_bins.coordinates
    ln, la = map_object(lngs, lats)
    map_object.pcolormesh(ln, la, data, cmap=cmap, vmin=0, vmax=max_val)
    # Draw the coastlines so they appear over the rays. Otherwise things are
    # sometimes hard to see.
    map_object.drawcoastlines()
    map_object.drawcountries(linewidth=0.2)
    map_object.drawmeridians(np.arange(0, 360, 30), **LINESTYLE)
    map_object.drawparallels(np.arange(-90, 90, 30), **LINESTYLE)
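
A hedged usage sketch for this Basemap-era variant. The shape of `station_events` is inferred from the loop above (pairs of an event dict and a station-id-to-dict mapping); the Basemap setup, coordinates, and rotation parameters are illustrative values, not lasif defaults, and the call still assumes lasif's module-level `np`, `plt`, `cm`, `rotations`, and `LINESTYLE`:

import matplotlib.pyplot as plt
from mpl_toolkits.basemap import Basemap

map_object = Basemap(projection="moll", lon_0=0, resolution="c")
# Inferred structure: (event_dict, {station_id: station_dict}) pairs.
station_events = [
    ({"latitude": 10.0, "longitude": 20.0},
     {"NET.STA": {"latitude": 48.0, "longitude": 11.0}}),
]
plot_raydensity(map_object, station_events,
                min_lat=-30.0, max_lat=60.0, min_lng=-20.0, max_lng=60.0,
                rot_axis=[0.0, 0.0, 1.0], rot_angle=0.0)
plt.show()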
Example #4
def plot_raydensity(
    map_object,
    station_events: List[Tuple[dict, dict]],
    domain: object,
    projection: cp.crs.Projection,
):
    """
    Create a ray-density plot for all events and all stations.

    This function is potentially expensive and will use all CPUs available.
    Does require geographiclib to be installed.

    :param map_object: The cartopy domain plot object
    :type map_object: cp.mpl.geoaxes.GeoAxes
    :param station_events: A list of tuples with two dictionaries
    :type station_events: List[Tuple[dict, dict]]
    :param domain: An object with the domain plot
    :type domain: object
    :param projection: cartopy projection object
    :type projection: cp.crs.Projection
    """
    import ctypes as C
    from lasif.tools.great_circle_binner import GreatCircleBinner
    from lasif.utils import Point
    import multiprocessing
    import progressbar
    from scipy.stats import scoreatpercentile

    # Merge everything so that a list with coordinate pairs is created. This
    # list is then distributed among all processors.
    station_event_list = []
    for event, stations in station_events:
        e_point = Point(event["latitude"], event["longitude"])
        for station in stations.values():
            p = Point(station["latitude"], station["longitude"])
            station_event_list.append((e_point, p))

    circle_count = len(station_event_list)

    # The granularity of the latitude/longitude discretization for the
    # raypaths. Attempt to get a somewhat meaningful result in any case.
    if circle_count < 1000:
        lat_lng_count = 1000
    elif circle_count < 10000:
        lat_lng_count = 2000
    else:
        lat_lng_count = 3000

    cpu_count = multiprocessing.cpu_count()

    def to_numpy(raw_array, dtype, shape):
        data = np.frombuffer(raw_array.get_obj())
        data.dtype = dtype
        return data.reshape(shape)

    print("\nLaunching %i great circle calculations on %i CPUs..." %
          (circle_count, cpu_count))

    widgets = [
        "Progress: ",
        progressbar.Percentage(),
        progressbar.Bar(),
        "",
        progressbar.ETA(),
    ]
    pbar = progressbar.ProgressBar(widgets=widgets,
                                   maxval=circle_count).start()

    def great_circle_binning(sta_evs, bin_data_buffer, bin_data_shape, lock,
                             counter):
        new_bins = GreatCircleBinner(
            domain.min_lat,
            domain.max_lat,
            lat_lng_count,
            domain.min_lon,
            domain.max_lon,
            lat_lng_count,
        )
        for event, station in sta_evs:
            with lock:
                counter.value += 1
            if not counter.value % 25:
                pbar.update(counter.value)
            new_bins.add_greatcircle(event, station)

        bin_data = to_numpy(bin_data_buffer, np.uint32, bin_data_shape)
        with bin_data_buffer.get_lock():
            bin_data += new_bins.bins

    # Split the data in cpu_count parts.
    def chunk(seq, num):
        avg = len(seq) / float(num)
        out = []
        last = 0.0
        while last < len(seq):
            out.append(seq[int(last):int(last + avg)])
            last += avg
        return out

    chunks = chunk(station_event_list, cpu_count)

    # One instance that collects everything.
    collected_bins = GreatCircleBinner(
        domain.min_lat,
        domain.max_lat,
        lat_lng_count,
        domain.min_lon,
        domain.max_lon,
        lat_lng_count,
    )

    # Use a multiprocessing shared memory array and map it to a numpy view.
    collected_bins_data = multiprocessing.Array(C.c_uint32,
                                                collected_bins.bins.size)
    collected_bins.bins = to_numpy(collected_bins_data, np.uint32,
                                   collected_bins.bins.shape)

    # Create, launch and join one process per CPU. Use a shared value as a
    # counter and a lock to avoid race conditions.
    processes = []
    lock = multiprocessing.Lock()
    counter = multiprocessing.Value("i", 0)
    for _i in range(cpu_count):
        processes.append(
            multiprocessing.Process(
                target=great_circle_binning,
                args=(
                    chunks[_i],
                    collected_bins_data,
                    collected_bins.bins.shape,
                    lock,
                    counter,
                ),
            ))
    for process in processes:
        process.start()
    for process in processes:
        process.join()

    pbar.finish()

    stations = chain.from_iterable(
        (_i[1].values() for _i in station_events if _i[1]))
    # Remove duplicates
    stations = [(_i["latitude"], _i["longitude"]) for _i in stations]
    stations = set(stations)
    title = "%i Events, %i unique raypaths, " "%i unique stations" % (
        len(station_events),
        circle_count,
        len(stations),
    )
    plt.title(title, size="xx-large")

    data = collected_bins.bins.transpose()

    if data.max() >= 10:
        data = np.log10(np.clip(data, a_min=0.5, a_max=data.max()))
        data[data >= 0.0] += 0.1
        data[data < 0.0] = 0.0
        max_val = scoreatpercentile(data.ravel(), 99)
    else:
        max_val = data.max()

    cmap = cm.get_cmap("gist_heat")
    cmap._init()
    cmap._lut[:120, -1] = np.linspace(0, 1.0, 120)**2

    lngs, lats = collected_bins.coordinates
    ln, la = project_points(projection, lngs, lats)

    map_object.pcolormesh(ln,
                          la,
                          data,
                          cmap=cmap,
                          vmin=0,
                          vmax=max_val,
                          zorder=10)
    # Draw the coastlines so they appear over the rays. Otherwise things are
    # sometimes hard to see.
    map_object.add_feature(cp.feature.COASTLINE, zorder=13)
    map_object.add_feature(cp.feature.BORDERS, linestyle=":", zorder=13)
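
A hedged usage sketch for the cartopy variant. Inside the function, `domain` is only read through its `min_lat`/`max_lat`/`min_lon`/`max_lon` attributes, so a tiny stand-in class (hypothetical, not a lasif domain type) is enough for illustration:

import matplotlib.pyplot as plt
import cartopy.crs as ccrs

class FakeDomain:
    # Hypothetical stand-in exposing only the attributes read above.
    min_lat, max_lat = -30.0, 60.0
    min_lon, max_lon = -20.0, 60.0

projection = ccrs.PlateCarree()
map_object = plt.axes(projection=projection)  # a cartopy GeoAxes
station_events = [
    ({"latitude": 10.0, "longitude": 20.0},
     {"NET.STA": {"latitude": 48.0, "longitude": 11.0}}),
]
plot_raydensity(map_object, station_events, FakeDomain(), projection)
plt.show()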
Example #5
def plot_raydensity(map_object, station_events, domain):
    """
    Create a ray-density plot for all events and all stations.

    This function is potentially expensive and will use all CPUs available.
    Does require geographiclib to be installed.
    """
    import ctypes as C
    from lasif import rotations
    from lasif.domain import RectangularSphericalSection
    from lasif.tools.great_circle_binner import GreatCircleBinner
    from lasif.utils import Point
    import multiprocessing
    import progressbar
    from scipy.stats import scoreatpercentile

    if not isinstance(domain, RectangularSphericalSection):
        raise NotImplementedError(
            "Raydensity currently only implemented for rectangular domains. "
            "Should be easy to implement for other domains. Let me know.")

    # Merge everything so that a list with coordinate pairs is created. This
    # list is then distributed among all processors.
    station_event_list = []
    for event, stations in station_events:
        if domain.rotation_angle_in_degree:
            # Rotate point to the non-rotated domain.
            e_point = Point(*rotations.rotate_lat_lon(
                event["latitude"], event["longitude"], domain.rotation_axis,
                -1.0 * domain.rotation_angle_in_degree))
        else:
            e_point = Point(event["latitude"], event["longitude"])
        for station in stations.values():
            # Rotate point to the non-rotated domain if necessary.
            if domain.rotation_angle_in_degree:
                p = Point(*rotations.rotate_lat_lon(
                    station["latitude"], station["longitude"],
                    domain.rotation_axis, -1.0 *
                    domain.rotation_angle_in_degree))
            else:
                p = Point(station["latitude"], station["longitude"])
            station_event_list.append((e_point, p))

    circle_count = len(station_event_list)

    # The granularity of the latitude/longitude discretization for the
    # raypaths. Attempt to get a somewhat meaningful result in any case.
    if circle_count < 1000:
        lat_lng_count = 1000
    elif circle_count < 10000:
        lat_lng_count = 2000
    else:
        lat_lng_count = 3000

    cpu_count = multiprocessing.cpu_count()

    def to_numpy(raw_array, dtype, shape):
        data = np.frombuffer(raw_array.get_obj())
        data.dtype = dtype
        return data.reshape(shape)

    print "\nLaunching %i greatcircle calculations on %i CPUs..." % \
        (circle_count, cpu_count)

    widgets = [
        "Progress: ",
        progressbar.Percentage(),
        progressbar.Bar(), "",
        progressbar.ETA()
    ]
    pbar = progressbar.ProgressBar(widgets=widgets,
                                   maxval=circle_count).start()

    def great_circle_binning(sta_evs, bin_data_buffer, bin_data_shape, lock,
                             counter):
        new_bins = GreatCircleBinner(domain.min_latitude, domain.max_latitude,
                                     lat_lng_count, domain.min_longitude,
                                     domain.max_longitude, lat_lng_count)
        for event, station in sta_evs:
            with lock:
                counter.value += 1
            if not counter.value % 25:
                pbar.update(counter.value)
            new_bins.add_greatcircle(event, station)

        bin_data = to_numpy(bin_data_buffer, np.uint32, bin_data_shape)
        with bin_data_buffer.get_lock():
            bin_data += new_bins.bins

    # Split the data in cpu_count parts.
    def chunk(seq, num):
        avg = len(seq) / float(num)
        out = []
        last = 0.0
        while last < len(seq):
            out.append(seq[int(last):int(last + avg)])
            last += avg
        return out

    chunks = chunk(station_event_list, cpu_count)

    # One instance that collects everything.
    collected_bins = GreatCircleBinner(domain.min_latitude,
                                       domain.max_latitude, lat_lng_count,
                                       domain.min_longitude,
                                       domain.max_longitude, lat_lng_count)

    # Use a multiprocessing shared memory array and map it to a numpy view.
    collected_bins_data = multiprocessing.Array(C.c_uint32,
                                                collected_bins.bins.size)
    collected_bins.bins = to_numpy(collected_bins_data, np.uint32,
                                   collected_bins.bins.shape)

    # Create, launch and join one process per CPU. Use a shared value as a
    # counter and a lock to avoid race conditions.
    processes = []
    lock = multiprocessing.Lock()
    counter = multiprocessing.Value("i", 0)
    for _i in range(cpu_count):
        processes.append(
            multiprocessing.Process(target=great_circle_binning,
                                    args=(chunks[_i], collected_bins_data,
                                          collected_bins.bins.shape, lock,
                                          counter)))
    for process in processes:
        process.start()
    for process in processes:
        process.join()

    pbar.finish()

    stations = chain.from_iterable(
        (_i[1].values() for _i in station_events if _i[1]))
    # Remove duplicates
    stations = [(_i["latitude"], _i["longitude"]) for _i in stations]
    stations = set(stations)
    title = "%i Events, %i unique raypaths, "\
            "%i unique stations" % (len(station_events), circle_count,
                                    len(stations))
    plt.title(title, size="xx-large")

    data = collected_bins.bins.transpose()

    if data.max() >= 10:
        data = np.log10(np.clip(data, a_min=0.5, a_max=data.max()))
        data[data >= 0.0] += 0.1
        data[data < 0.0] = 0.0
        max_val = scoreatpercentile(data.ravel(), 99)
    else:
        max_val = data.max()

    cmap = cm.get_cmap("gist_heat")
    cmap._init()
    cmap._lut[:120, -1] = np.linspace(0, 1.0, 120)**2

    # Slightly change the appearance of the map so it suits the rays.
    map_object.fillcontinents(color='#dddddd', lake_color='#dddddd', zorder=0)

    lngs, lats = collected_bins.coordinates
    # Rotate back if necessary!
    if domain.rotation_angle_in_degree:
        for lat, lng in zip(lats, lngs):
            lat[:], lng[:] = rotations.rotate_lat_lon(
                lat, lng, domain.rotation_axis,
                domain.rotation_angle_in_degree)
    ln, la = map_object(lngs, lats)
    map_object.pcolormesh(ln, la, data, cmap=cmap, vmin=0, vmax=max_val)
    # Draw the coastlines so they appear over the rays. Otherwise things are
    # sometimes hard to see.
    map_object.drawcoastlines()
    map_object.drawcountries(linewidth=0.2)
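
`rotations.rotate_lat_lon` is lasif's own helper for moving points between the rotated and non-rotated domains. For orientation only, here is a self-contained sketch of such a rotation via Rodrigues' formula on a unit sphere; the sign convention is illustrative and may not match lasif's:

import numpy as np

def rotate_lat_lon(lat, lon, axis, angle_deg):
    # Convert the geographic point to a unit vector ...
    lat_r, lon_r, a = map(np.radians, (lat, lon, angle_deg))
    v = np.array([np.cos(lat_r) * np.cos(lon_r),
                  np.cos(lat_r) * np.sin(lon_r),
                  np.sin(lat_r)])
    k = np.asarray(axis, dtype=float)
    k /= np.linalg.norm(k)
    # ... rotate it with Rodrigues' formula ...
    v_rot = (v * np.cos(a) + np.cross(k, v) * np.sin(a)
             + k * np.dot(k, v) * (1.0 - np.cos(a)))
    # ... and convert back to latitude/longitude in degrees.
    return (np.degrees(np.arcsin(v_rot[2])),
            np.degrees(np.arctan2(v_rot[1], v_rot[0])))

print(rotate_lat_lon(45.0, 10.0, [0.0, 0.0, 1.0], 30.0))  # -> (45.0, 40.0)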
Example #6
def plot_raydensity(map_object, station_events, domain):
    """
    Create a ray-density plot for all events and all stations.

    This function is potentially expensive and will use all CPUs available.
    Does require geographiclib to be installed.
    """
    import ctypes as C
    from lasif import rotations
    from lasif.domain import RectangularSphericalSection
    from lasif.tools.great_circle_binner import GreatCircleBinner
    from lasif.utils import Point
    import multiprocessing
    import progressbar
    from scipy.stats import scoreatpercentile

    if not isinstance(domain, RectangularSphericalSection):
        raise NotImplementedError(
            "Raydensity currently only implemented for rectangular domains. "
            "Should be easy to implement for other domains. Let me know.")

    # Merge everything so that a list with coordinate pairs is created. This
    # list is then distributed among all processors.
    station_event_list = []
    for event, stations in station_events:
        if domain.rotation_angle_in_degree:
            # Rotate point to the non-rotated domain.
            e_point = Point(*rotations.rotate_lat_lon(
                event["latitude"], event["longitude"], domain.rotation_axis,
                -1.0 * domain.rotation_angle_in_degree))
        else:
            e_point = Point(event["latitude"], event["longitude"])
        for station in stations.values():
            # Rotate point to the non-rotated domain if necessary.
            if domain.rotation_angle_in_degree:
                p = Point(*rotations.rotate_lat_lon(
                    station["latitude"], station["longitude"],
                    domain.rotation_axis,
                    -1.0 * domain.rotation_angle_in_degree))
            else:
                p = Point(station["latitude"], station["longitude"])
            station_event_list.append((e_point, p))

    circle_count = len(station_event_list)

    # The granularity of the latitude/longitude discretization for the
    # raypaths. Attempt to get a somewhat meaningful result in any case.
    if circle_count < 1000:
        lat_lng_count = 1000
    elif circle_count < 10000:
        lat_lng_count = 2000
    else:
        lat_lng_count = 3000

    cpu_count = multiprocessing.cpu_count()

    def to_numpy(raw_array, dtype, shape):
        data = np.frombuffer(raw_array.get_obj())
        data.dtype = dtype
        return data.reshape(shape)

    print "\nLaunching %i greatcircle calculations on %i CPUs..." % \
        (circle_count, cpu_count)

    widgets = ["Progress: ", progressbar.Percentage(),
               progressbar.Bar(), "", progressbar.ETA()]
    pbar = progressbar.ProgressBar(widgets=widgets,
                                   maxval=circle_count).start()

    def great_circle_binning(sta_evs, bin_data_buffer, bin_data_shape,
                             lock, counter):
        new_bins = GreatCircleBinner(
            domain.min_latitude, domain.max_latitude,
            lat_lng_count, domain.min_longitude,
            domain.max_longitude, lat_lng_count)
        for event, station in sta_evs:
            with lock:
                counter.value += 1
            if not counter.value % 25:
                pbar.update(counter.value)
            new_bins.add_greatcircle(event, station)

        bin_data = to_numpy(bin_data_buffer, np.uint32, bin_data_shape)
        with bin_data_buffer.get_lock():
            bin_data += new_bins.bins

    # Split the data in cpu_count parts.
    def chunk(seq, num):
        avg = len(seq) / float(num)
        out = []
        last = 0.0
        while last < len(seq):
            out.append(seq[int(last):int(last + avg)])
            last += avg
        return out
    chunks = chunk(station_event_list, cpu_count)

    # One instance that collects everything.
    collected_bins = GreatCircleBinner(
        domain.min_latitude, domain.max_latitude,
        lat_lng_count, domain.min_longitude,
        domain.max_longitude, lat_lng_count)

    # Use a multiprocessing shared memory array and map it to a numpy view.
    collected_bins_data = multiprocessing.Array(C.c_uint32,
                                                collected_bins.bins.size)
    collected_bins.bins = to_numpy(collected_bins_data, np.uint32,
                                   collected_bins.bins.shape)

    # Create, launch and join one process per CPU. Use a shared value as a
    # counter and a lock to avoid race conditions.
    processes = []
    lock = multiprocessing.Lock()
    counter = multiprocessing.Value("i", 0)
    for _i in range(cpu_count):
        processes.append(multiprocessing.Process(
            target=great_circle_binning, args=(chunks[_i], collected_bins_data,
                                               collected_bins.bins.shape, lock,
                                               counter)))
    for process in processes:
        process.start()
    for process in processes:
        process.join()

    pbar.finish()

    stations = chain.from_iterable((
        _i[1].values() for _i in station_events if _i[1]))
    # Remove duplicates
    stations = [(_i["latitude"], _i["longitude"]) for _i in stations]
    stations = set(stations)
    title = "%i Events, %i unique raypaths, "\
            "%i unique stations" % (len(station_events), circle_count,
                                    len(stations))
    plt.title(title, size="xx-large")

    data = collected_bins.bins.transpose()

    if data.max() >= 10:
        data = np.log10(np.clip(data, a_min=0.5, a_max=data.max()))
        data[data >= 0.0] += 0.1
        data[data < 0.0] = 0.0
        max_val = scoreatpercentile(data.ravel(), 99)
    else:
        max_val = data.max()

    cmap = cm.get_cmap("gist_heat")
    cmap._init()
    cmap._lut[:120, -1] = np.linspace(0, 1.0, 120) ** 2

    # Slightly change the appearance of the map so it suits the rays.
    map_object.fillcontinents(color='#dddddd', lake_color='#dddddd', zorder=2)

    lngs, lats = collected_bins.coordinates
    # Rotate back if necessary!
    if domain.rotation_angle_in_degree:
        for lat, lng in zip(lats, lngs):
            lat[:], lng[:] = rotations.rotate_lat_lon(
                lat, lng, domain.rotation_axis,
                domain.rotation_angle_in_degree)
    ln, la = map_object(lngs, lats)
    map_object.pcolormesh(ln, la, data, cmap=cmap, vmin=0, vmax=max_val,
                          zorder=10)
    # Draw the coastlines so they appear over the rays. Otherwise things are
    # sometimes hard to see.
    map_object.drawcoastlines(zorder=3)
    map_object.drawcountries(linewidth=0.2, zorder=3)
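
`chunk()` reappears unchanged in every variant: it strides through the sequence in float-sized steps, so the pieces differ in length by at most one element and nothing is dropped. A quick standalone check:

def chunk(seq, num):
    avg = len(seq) / float(num)
    out = []
    last = 0.0
    while last < len(seq):
        out.append(seq[int(last):int(last + avg)])
        last += avg
    return out

print(chunk(list(range(10)), 4))
# -> [[0, 1], [2, 3, 4], [5, 6], [7, 8, 9]]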
Example #7
def plot_raydensity(map_object, station_events, min_lat, max_lat, min_lng,
                    max_lng, rot_axis, rot_angle):
    """
    Create a ray-density plot for all events and all stations.

    This function is potentially expensive and will use all CPUs available.
    Does require geographiclib to be installed.
    """
    import ctypes as C
    from lasif.tools.great_circle_binner import GreatCircleBinner, Point
    import multiprocessing
    import progressbar
    from scipy.stats import scoreatpercentile

    bounds = rotations.get_max_extention_of_domain(
        min_lat,
        max_lat,
        min_lng,
        max_lng,
        rotation_axis=rot_axis,
        rotation_angle_in_degree=rot_angle)

    # Merge everything so that a list with coordinate pairs is created. This
    # list is then distributed among all processors.
    station_event_list = []
    for event, stations in station_events:
        org = event.preferred_origin() or event.origins[0]
        e_point = Point(org.latitude, org.longitude)
        for station in stations.values():
            station_event_list.append(
                (e_point, Point(station["latitude"], station["longitude"])))

    circle_count = len(station_event_list)

    # The granularity of the latitude/longitude discretization for the
    # raypaths. Attempt to get a somewhat meaningful result in any case.
    if circle_count < 1000:
        lat_lng_count = 1000
    elif circle_count < 10000:
        lat_lng_count = 2000
    else:
        lat_lng_count = 3000

    cpu_count = multiprocessing.cpu_count()

    def to_numpy(raw_array, dtype, shape):
        data = np.frombuffer(raw_array.get_obj())
        data.dtype = dtype
        return data.reshape(shape)

    print "\nLaunching %i greatcircle calculations on %i CPUs..." % \
        (circle_count, cpu_count)

    widgets = [
        "Progress: ",
        progressbar.Percentage(),
        progressbar.Bar(), "",
        progressbar.ETA()
    ]
    pbar = progressbar.ProgressBar(widgets=widgets,
                                   maxval=circle_count).start()

    def great_circle_binning(sta_evs, bin_data_buffer, bin_data_shape, lock,
                             counter):
        new_bins = GreatCircleBinner(bounds["minimum_latitude"],
                                     bounds["maximum_latitude"], lat_lng_count,
                                     bounds["minimum_longitude"],
                                     bounds["maximum_longitude"],
                                     lat_lng_count)
        for event, station in sta_evs:
            with lock:
                counter.value += 1
            if not counter.value % 25:
                pbar.update(counter.value)
            new_bins.add_greatcircle(event, station)

        bin_data = to_numpy(bin_data_buffer, np.uint32, bin_data_shape)
        with bin_data_buffer.get_lock():
            bin_data += new_bins.bins

    # Split the data in cpu_count parts.
    def chunk(seq, num):
        avg = len(seq) / float(num)
        out = []
        last = 0.0
        while last < len(seq):
            out.append(seq[int(last):int(last + avg)])
            last += avg
        return out

    chunks = chunk(station_event_list, cpu_count)

    # One instance that collects everything.
    collected_bins = GreatCircleBinner(bounds["minimum_latitude"],
                                       bounds["maximum_latitude"],
                                       lat_lng_count,
                                       bounds["minimum_longitude"],
                                       bounds["maximum_longitude"],
                                       lat_lng_count)

    # Use a multiprocessing shared memory array and map it to a numpy view.
    collected_bins_data = multiprocessing.Array(C.c_uint32,
                                                collected_bins.bins.size)
    collected_bins.bins = to_numpy(collected_bins_data, np.uint32,
                                   collected_bins.bins.shape)

    # Create, launch and join one process per CPU. Use a shared value as a
    # counter and a lock to avoid race conditions.
    processes = []
    lock = multiprocessing.Lock()
    counter = multiprocessing.Value("i", 0)
    for _i in range(cpu_count):
        processes.append(
            multiprocessing.Process(target=great_circle_binning,
                                    args=(chunks[_i], collected_bins_data,
                                          collected_bins.bins.shape, lock,
                                          counter)))
    for process in processes:
        process.start()
    for process in processes:
        process.join()

    pbar.finish()

    title = "%i Events with %i recorded 3 component waveforms" % (
        len(station_events), circle_count)
    #plt.gca().set_title(title, size="large")
    plt.title(title, size="xx-large")

    data = collected_bins.bins.transpose()

    if data.max() >= 10:
        data = np.log10(data)
        data += 0.1
        data[np.isinf(data)] = 0.0
        max_val = scoreatpercentile(data.ravel(), 99)
    else:
        max_val = data.max()

    cmap = cm.get_cmap("gist_heat")
    cmap._init()
    cmap._lut[:120, -1] = np.linspace(0, 1.0, 120)**2

    # Slightly change the appearance of the map so it suits the rays.
    map_object.drawmapboundary(fill_color='#bbbbbb')
    map_object.fillcontinents(color='#dddddd', lake_color='#dddddd', zorder=0)

    lngs, lats = collected_bins.coordinates
    ln, la = map_object(lngs, lats)
    map_object.pcolormesh(ln, la, data, cmap=cmap, vmin=0, vmax=max_val)
    # Draw the coastlines so they appear over the rays. Otherwise things are
    # sometimes hard to see.
    map_object.drawcoastlines()
    map_object.drawcountries(linewidth=0.2)
    map_object.drawmeridians(np.arange(0, 360, 30))
    map_object.drawparallels(np.arange(-90, 90, 30))
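
Every variant fades the low end of `gist_heat` to transparent by writing into `cmap._lut`, a private matplotlib structure. A rough public-API equivalent (same quadratic alpha ramp, resampled to 256 entries; newer matplotlib prefers `matplotlib.colormaps["gist_heat"]` over `cm.get_cmap`):

import numpy as np
from matplotlib import cm
from matplotlib.colors import ListedColormap

base = cm.get_cmap("gist_heat")
colors = base(np.linspace(0.0, 1.0, 256))           # (256, 4) RGBA samples
colors[:120, -1] = np.linspace(0.0, 1.0, 120) ** 2  # quadratic fade-in
cmap = ListedColormap(colors)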