Example #1
    def _get_edges_rids_opa(event):
        """
        Return a list of edges (resource_id_parent, resource_id_child),
        a set of rejected resource_ids and a dict of
        {resource_id: (obj, parent, attr)}.
        """

        # [(resource_id_parent, resource_id_child), ]
        edges = []  # list of tuples (parent, child)
        # {resource_id, }
        rejected_rid = set()
        # {resource_id: [obj, parent, attr]}
        rejected_opa = {}
        # first make a list of objects that have an evaluation_status, as well
        # as all other objects they refer to, using resource_ids
        opa_iter = yield_obj_parent_attr(event, has_attr="evaluation_status")
        for obj, parent, attr in opa_iter:
            rid = str(obj.resource_id)
            is_rejected = obj.evaluation_status == "rejected"
            if is_rejected:
                rejected_rid.add(rid)
                rejected_opa[rid] = (obj, parent, attr)
            # now recurse object and find all objects this one refers to
            opa_iter_2 = yield_obj_parent_attr(obj, cls=ev.ResourceIdentifier)
            for obj2, _, _ in opa_iter_2:
                edges.append((rid, str(obj2)))
        return edges, rejected_rid, rejected_opa
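
For orientation, the snippet below is a minimal usage sketch of the iterator all of these examples rely on: yield_obj_parent_attr walks an ObsPy object tree and yields (object, parent, attribute_name) tuples, optionally filtered by class (cls) or by the presence of an attribute (has_attr). The obsplus.utils import path is an assumption and may differ between obsplus versions.

import obspy
import obspy.core.event as ev
from obsplus.utils import yield_obj_parent_attr  # import path is an assumption

cat = obspy.read_events()  # ObsPy's bundled example catalog
# yield every ResourceIdentifier in the first event, together with the object
# holding it and the attribute name it is stored under
for obj, parent, attr in yield_obj_parent_attr(cat[0], cls=ev.ResourceIdentifier):
    print(f"{type(parent).__name__}.{attr} -> {obj}")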
Example #2
def _events_to_tables(
    event_iterable: Union[obspy.Catalog, Sequence[ev.Event]]
) -> Dict[str, pd.DataFrame]:
    """ Create tables from an event iterable """
    obj_dict = defaultdict(list)
    seen_ids = set()

    def _obj_to_dict(obj, parent_id=None, event_id=None):
        """ Convert objects to flat dicts for conversion to pandas tables. """
        # don't process anything twice; only objects with resource_ids go in tables
        if id(obj) in seen_ids or not hasattr(obj, "resource_id"):
            return
        seen_ids.add(id(obj))
        # get the expected datatypes and fields, then extract values into dict
        name = getattr(obj, "__name__", type(obj).__name__)
        obj_dict[name].append(_get_values(obj, parent_id, event_id))

    for event in event_iterable:
        event_id = str(event.resource_id)
        for obj, parent, attr in yield_obj_parent_attr(event):
            parent_id = getattr(parent, "resource_id", None)
            _obj_to_dict(obj, parent_id=parent_id, event_id=event_id)

    obj_dict.pop("ResourceIdentifier", None)

    tables = {
        i: pd.DataFrame(v).set_index("resource_id")
        for i, v in obj_dict.items()
    }
    tables["ID"] = _create_resource_id_tables(tables)
    return tables
Example #3
def attach_all_resource_ids(event: Event):
    """ recurse all objects in a events and set referred objects """
    rid_to_object = {}
    # first pass, bind all resource ids to parent
    for rid, parent, attr in yield_obj_parent_attr(event, ResourceIdentifier):
        if attr == "resource_id":
            # bind the resource_id to its parent object
            rid.set_referred_object(parent)
            # if this id was seen before it must refer to the same object
            if rid.id in rid_to_object:
                assert rid.get_referred_object() is rid_to_object[rid.id]
            rid_to_object[rid.id] = parent
    # second pass, bind all other resource ids to correct resource ids
    for rid, parent, attr in yield_obj_parent_attr(event, ResourceIdentifier):
        if attr != "resource_id" and rid.id in rid_to_object:
            rid.set_referred_object(rid_to_object[rid.id])
Example #4
 def prune_catalog(self, catalog):
     """ recurse a events and set all attrs that eval to False to None.
     This is needed to overcome some Catalog oddities to fairly compare two
     catalogs. """
     skips = (obspy.UTCDateTime, ev.ResourceIdentifier)
     cat = catalog.copy()
     for obj, parent, attr in yield_obj_parent_attr(cat):
         if isinstance(obj, skips):
             continue
         for item, val in obj.__dict__.items():
             setattr(obj, item, val or None)
     return cat
Example #5
 def simple_catalog_to_merge(self, bingham_catalog):
     """
     Create a simple catalog to merge into bingham_cat using only two events.
     """
     cat = obspy.Catalog(events=bingham_catalog[:2]).copy()
     # drop first pick
     cat[0].picks = cat[0].picks[1:]
     # modify the picks to whole seconds, reset pick IDs
     for pick, _, _ in yield_obj_parent_attr(cat, ev.Pick):
         pick.time -= (pick.time.timestamp) % 1
         pick.resource_id = ev.ResourceIdentifier(referred_object=pick)
     return cat
Example #6
def _remove_empty_quantity_errors(catalog):
    """
    Copy the catalog and set all empty QuantityErrors to None.
    This is needed to evaluate equality of catalogs that may have
    None or empty QuantityErrors.

    Fixed in https://github.com/obspy/obspy/pull/2185
    """
    cat = catalog.copy()
    for obj, parent, attr in yield_obj_parent_attr(cat, cls=ev.QuantityError):
        if not obj:
            setattr(parent, attr, None)
    return cat
Example #7
 def simple_catalog_to_merge(self, bingham_catalog):
     """
     Create a simple catalog to merge into bingham_cat using only two events.
     """
     events = sorted(bingham_catalog, key=get_reference_time)
     cat = obspy.Catalog(events=events[:2]).copy()
     # drop first pick
     cat[0].picks = cat[0].picks[1:]
     # modify the picks to whole seconds, reset pick IDS
     for pick, _, _ in yield_obj_parent_attr(cat, ev.Pick):
         nearest_second = np.round(pick.time.timestamp)
         pick.time = obspy.UTCDateTime(nearest_second)
         # pick.resource_id = ev.ResourceIdentifier(referred_object=pick)
     return cat
Example #8
def duplicate_events(
    event: catalog_or_event, resource_generator: Optional[Callable] = None
) -> catalog_or_event:
    """
    Duplicate an event.

    Creates a copy of the event and assigns new resource IDs to all
    internal objects (i.e. picks, station_magnitudes, etc.). It will not
    assign new resource_ids to those representing external resources
    (e.g. analysts, velocity models, etc.).

    Parameters
    ----------
    event
        The event to duplicate.
    resource_generator:
        A callable that can be used to generate resource IDs.
    """
    new = copy.deepcopy(event)
    gen_func = resource_generator or ResourceIdentifier
    id_map = {}  # map old id to new
    # first pass through bind resource id to parent if attr is resource_id
    for rid, parent, attr in yield_obj_parent_attr(new, ResourceIdentifier):
        if attr == "resource_id":
            try:
                prefix = rid.prefix
            except AttributeError:
                prefix = "smi:local"
            new_rid = gen_func(referred_object=parent, prefix=prefix)
            id_map[rid.id] = new_rid
            setattr(parent, attr, new_rid)
    # second pass, swap out resource ids with the same id as those in id_map
    for rid, parent, attr in yield_obj_parent_attr(new, ResourceIdentifier):
        if rid.id in id_map and attr != "resource_id":
            setattr(parent, attr, id_map[rid.id])
    return new
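
A hedged usage sketch for duplicate_events as defined above; it assumes the function and its imports (copy, ResourceIdentifier, Optional, Callable and the catalog_or_event type alias) are already in scope.

import obspy

cat = obspy.read_events()       # ObsPy's bundled example catalog
dup = duplicate_events(cat[0])  # deep copy with regenerated internal resource ids
# the event's own resource_id is internal, so it is replaced ...
assert dup.resource_id != cat[0].resource_id
# ... while the event content itself is preserved
assert len(dup.origins) == len(cat[0].origins)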
Example #9
 def test_catalog(self, null_catalog):
     """ ensure all nullish catalog chars are replaced """
     cat = obsplus.utils.replace_null_nlsc_codes(null_catalog.copy())
     for pick, _, _ in yield_obj_parent_attr(cat, cls=ev.Pick):
         wid = pick.waveform_id
         assert wid.location_code == ""
Example #10
 def test_nullish_codes_replaced(self, cat_nullish_nslc_codes):
     """ Nullish location codes should be replace with empty strings. """
     kwargs = dict(obj=cat_nullish_nslc_codes, cls=WaveformStreamID)
     for obj, _, _ in yield_obj_parent_attr(**kwargs):
         assert obj.location_code == ""
Example #11
def _decompose_generic(events):
    """ Decompose an event or a catalog. """
    out = defaultdict(list)
    for obj, parent, attr in yield_obj_parent_attr(events):
        out[type(obj)].append(obj)
    return out
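
A small usage sketch for _decompose_generic as defined above (assumes defaultdict and yield_obj_parent_attr are already imported):

import obspy

cat = obspy.read_events()          # ObsPy's bundled example catalog
by_type = _decompose_generic(cat)  # {class: [instances, ]}
# show how many objects of each class the catalog contains
for cls_, objs in by_type.items():
    print(cls_.__name__, len(objs))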