Exemplo n.º 1
0
 def make_heartbeat(self, session=None):
     '''
     Make an Event row that will only trigger a notification for
     groups with a heartbeat group specification
     '''
     # A heartbeat goes stale after one day; find the most recent one
     heartbeats = session.query(Event).filter(Event.event_id == 'heartbeat').all()
     one_day = 24 * 60 * 60
     is_due = not heartbeats or time.time() > heartbeats[-1].time + one_day

     if is_due:
         hb = Event()
         hb.time = time.time()
         hb.event_id = 'heartbeat'
         hb.magnitude = 10
         hb.lat = 0
         hb.lon = 0
         hb.title = 'ShakeCast Heartbeat'
         hb.place = 'ShakeCast is running'
         hb.status = 'new'
         hb.directory_name = os.path.join(self.data_dir, hb.event_id)
         session.add(hb)
         session.commit()

         self.get_event_map(hb)
Exemplo n.º 2
0
    def make_heartbeat(self, session=None):
        '''
        Make an Event row that will only trigger a notification for
        groups with a heartbeat group specification
        '''
        # Grab every prior heartbeat row, oldest first
        previous = session.query(Event).filter(
            Event.event_id == 'heartbeat').all()

        # A new heartbeat is due when none exist yet, or when the
        # newest one is more than a day old
        stale_after = 24 * 60 * 60
        due = not previous or time.time() > previous[-1].time + stale_after

        if due:
            heartbeat = Event(event_id='heartbeat', save=True)
            heartbeat.time = time.time()
            heartbeat.magnitude = 10
            heartbeat.lat = 0
            heartbeat.lon = 0
            heartbeat.title = 'ShakeCast Heartbeat'
            heartbeat.place = 'ShakeCast is running'
            heartbeat.status = 'new'
            heartbeat.type = 'heartbeat'

            session.add(heartbeat)
            session.commit()

            self.get_event_map(heartbeat)
Exemplo n.º 3
0
def grab_from_directory(directory, session=None):
    """
    Import an event and shakemap pair from a local directory.

    The directory must contain an ``info.json`` file and a ``grid.xml``
    shakemap product; an Event and a ShakeMap row are created from them
    and committed through the supplied session.

    Args:
        directory: path containing info.json and grid.xml
        session: database session used to add/commit the new rows

    Returns:
        dict with 'status', 'error', 'log', and 'message' keys

    Raises:
        Exception: whatever went wrong while reading/parsing the
        directory contents (re-raised after being recorded).
    """
    info_loc = os.path.join(directory, 'info.json')

    error = ''
    log = ''
    try:
        with open(info_loc, 'r') as info_file:
            info = json.loads(info_file.read())

        # Load shakemap grid to get extra info
        grid_loc = os.path.join(directory, 'grid.xml')
        grid = ShakeMapGrid()
        grid.load(grid_loc)

    except Exception as e:
        error = str(e)
        log = error
        # BUGFIX: the original fell through here and then crashed with a
        # NameError on 'info'; re-raise so callers see the real failure
        # (matches the behavior of the other grab_from_directory variant)
        raise

    event_info = info['input']['event_information']

    # make timestamp
    # NOTE(review): '%Y-%d-%mT...' puts day before month -- looks odd but
    # matches the sibling implementation; confirm against the feed format
    dt = datetime.datetime.strptime(event_info['origin_time'],
                                    '%Y-%d-%mT%H:%M:%SZ')
    timestamp = time.mktime(dt.timetuple())

    event = Event(status='new',
                  event_id=grid.event_id,
                  title='M {} - {}'.format(event_info['magnitude'],
                                           event_info['location']),
                  place=event_info['location'],
                  time=timestamp,
                  magnitude=event_info['magnitude'],
                  lon=event_info['longitude'],
                  lat=event_info['latitude'],
                  depth=event_info['depth'])

    session.add(event)

    proc = info['processing']
    shakemap = ShakeMap(
        status='new',
        event=event,
        shakemap_id=grid.event_id,
        lat_min=grid.lat_min,
        lat_max=grid.lat_max,
        lon_min=grid.lon_min,
        lon_max=grid.lon_max,
        generation_timestamp=proc['shakemap_versions']['process_time'],
        recieve_timestamp=time.time())

    session.add(shakemap)

    session.commit()

    return {
        'status': 'finished',
        'error': error,
        'log': log,
        'message': 'File scrape for new earthquakes'
    }
Exemplo n.º 4
0
def _refresh_from_file(file, src, sess):
    """
    Parse newly appended lines from *file* and store them as Event rows.

    A Logfile row keyed on the file path records how far into the file we
    have already read, so repeated calls only process fresh lines.

    Args:
        file: object with a ``path`` attribute pointing at the logfile
        src: source descriptor; its ``name`` is stored as the event's
            ``src_abbr``
        sess: database session used for queries, adds and commits

    Returns:
        list of up to 100 newly created Event rows (capped so the caller
        doesn't do huge sends over sockets)
    """
    logging.debug(file.path)
    new_events = []
    logfile = sess.query(Logfile).get(file.path)
    if logfile is None:  # first time seeing this path; start at offset 0
        logfile = Logfile(path=file.path, offset=0)
        sess.add(logfile)

    with open(logfile.path, 'rb') as f:
        logging.debug('offset: {}'.format(logfile.offset))
        f.seek(logfile.offset)
        line_count = 0  # renamed from 'iter', which shadowed the builtin
        for line in f:
            try:
                data = utils.logline_to_dict(line.decode())
                # crash lines are ignored entirely
                if not ('type' in data and data['type'] == 'crash'):
                    if 'milestone' in data:
                        event = Event(type=EventType.milestone,
                                      data=json.dumps(data),
                                      time=utils.crawl_date_to_datetime(
                                          data['time']),
                                      src_abbr=src.name)
                    else:
                        event = Event(type=EventType.game,
                                      data=json.dumps(data),
                                      time=utils.crawl_date_to_datetime(
                                          data['end']),
                                      src_abbr=src.name)
                    sess.add(event)
                    # don't want to do huge sends over sockets
                    # TODO: make a config option
                    if len(new_events) < 100:
                        new_events.append(event)
            except KeyError as e:
                logging.error('key {} not found'.format(e))
            except Exception:  # how scandalous! Don't want one broken line to break everything
                logging.exception(
                    'Something unexpected happened, skipping this event')
            line_count += 1
            logfile.offset += len(line)
            if line_count % 1000 == 0:  # don't spam commits
                sess.commit()
        logfile.offset = f.tell()
        sess.commit()
        return new_events
Exemplo n.º 5
0
    def get_new_events(self, session=None, scenario=False):
        """
        Checks the json feed for new earthquakes

        Walks ``self.earthquakes`` (parsed geojson-style feed), skips
        events from ignored networks or below the configured magnitude
        cutoff, dedupes against events already in the database (deleting
        the older rows and carrying their shakemaps/notifications over),
        and commits each event worth keeping.

        Args:
            session: database session used for queries, deletes, commits
            scenario (bool): if True, the event id gets a '_scenario'
                suffix and the status is forced to 'scenario'

        Returns:
            tuple (new_events, event_str): saved Event rows and a
            human-readable summary string, one 'Event: <id>' line each
        """
        sc = SC()

        event_str = ''
        new_events = []
        for eq_id in self.earthquakes.keys():
            eq = self.earthquakes[eq_id]
            
            # ignore info from unfavorable networks and low mag eqs
            if (eq['properties']['net'] in self.ignore_nets or
                    eq['properties']['mag'] < sc.new_eq_mag_cutoff):
                continue
            
            # get event id and all ids
            event = Event()
            event.all_event_ids = eq['properties']['ids']
            if scenario is False:
                event.event_id = eq_id
            else:
                event.event_id = eq_id + '_scenario'
                event.all_event_ids = event.event_id
            
            # use id and all ids to determine if the event is new and
            # query the old event if necessary
            old_shakemaps = []
            old_notifications = []
            if event.is_new() is False:
                event.status = 'processed'
                # all_event_ids is comma-delimited; strip stray commas first
                ids = event.all_event_ids.strip(',').split(',')
                old_events = [(session.query(Event)
                                .filter(Event.event_id == each_id)
                                .first())
                                    for each_id in ids]
                
                # remove older events
                for old_event in old_events:
                    if old_event is not None:
                        # carry associations over to the replacement event
                        old_notifications += old_event.notifications
                        old_shakemaps += old_event.shakemaps
                        
                        # if one of these old events hasn't had
                        # notifications sent, this event should be sent
                        if old_event.status == 'new':
                            event.status = 'new'
                        session.delete(old_event)
            else:
                event.status = 'new'

            # over ride new status if scenario
            if scenario is True:
                event.status = 'scenario'
                        
            # Fill the rest of the event info
            event.directory_name = os.path.join(self.data_dir,
                                                event.event_id)
            event.title = self.earthquakes[eq_id]['properties']['title']
            event.place = self.earthquakes[eq_id]['properties']['place']
            # feed time is in milliseconds; store seconds
            event.time = self.earthquakes[eq_id]['properties']['time']/1000.0
            event.magnitude = eq['properties']['mag']
            event_coords = self.earthquakes[eq_id]['geometry']['coordinates']
            event.lon = event_coords[0]
            event.lat = event_coords[1]
            event.depth = event_coords[2]
            
            # determine whether or not an event should be kept
            # based on group definitions
            keep_event = False
            groups = session.query(Group).all()
            if len(groups) > 0:
                for group in groups:
                    if group.point_inside(event):
                        keep_event = True
            else:
                # no groups configured at all -- keep everything
                keep_event = True
            
            if keep_event is False:
                continue

            if old_shakemaps:
                event.shakemaps = old_shakemaps
            if old_notifications:
                event.notifications = old_notifications

            session.add(event)
            session.commit()
            
            self.get_event_map(event)
            
            # add the event to the return list and add info to the
            # return string
            new_events += [event]
            event_str += 'Event: %s\n' % event.event_id
        
        # print event_str
        return new_events, event_str
Exemplo n.º 6
0
    def make_heartbeat(self):
        '''
        Make an Event row that will only trigger a notification for
        groups with a heartbeat group_specification
        '''
        session = Session()

        # The most recent heartbeat (if any) decides whether a new one is due
        existing = session.query(Event).filter(
            Event.event_id == 'heartbeat').all()

        one_day = 24 * 60 * 60
        overdue = not existing or time.time() > existing[-1].time + one_day

        if overdue:
            hb = Event()
            hb.time = time.time()
            hb.event_id = 'heartbeat'
            hb.magnitude = 10
            # NOTE(review): 1000/1000 is outside any real coordinate range;
            # presumably so location-based groups never match -- confirm
            hb.lat = 1000
            hb.lon = 1000
            hb.title = 'ShakeCast Heartbeat'
            hb.place = 'ShakeCast is running'
            hb.status = 'new'
            hb.directory_name = os.path.join(self.data_dir, hb.event_id)
            session.add(hb)
            session.commit()

            self.get_event_map(hb)

        Session.remove()
Exemplo n.º 7
0
    def get_new_events(self, scenario=False):
        """
        Checks the json feed for new earthquakes

        Walks ``self.earthquakes``, skips events from ignored networks or
        below the configured magnitude cutoff, dedupes against events
        already in the database (deleting older rows and carrying their
        shakemaps/notifications over), and commits the rest.

        Args:
            scenario (bool): if True, the event id gets a '_scenario'
                suffix and the status is forced to 'scenario'

        Returns:
            tuple (new_events, event_str): saved Event rows and a
            human-readable summary string
        """
        session = Session()
        sc = SC()

        event_str = ''
        new_events = []
        for eq_id in self.earthquakes.keys():
            eq = self.earthquakes[eq_id]

            # ignore info from unfavorable networks and low mag eqs
            if (eq['properties']['net'] in self.ignore_nets
                    or eq['properties']['mag'] < sc.new_eq_mag_cutoff):
                continue

            # get event id and all ids
            event = Event()
            event.all_event_ids = eq['properties']['ids']
            if scenario is False:
                event.event_id = eq_id
            else:
                event.event_id = eq_id + '_scenario'
                event.all_event_ids = event.event_id

            # use id and all ids to determine if the event is new and
            # query the old event if necessary
            old_shakemaps = []
            old_notifications = []
            if event.is_new() is False:
                event.status = 'processed'
                ids = event.all_event_ids.strip(',').split(',')
                old_events = [(session.query(Event).filter(
                    Event.event_id == each_id).first()) for each_id in ids]

                # remove older events
                for old_event in old_events:
                    if old_event is not None:
                        old_notifications += old_event.notifications
                        old_shakemaps += old_event.shakemaps

                        # if one of these old events hasn't had
                        # notifications sent, this event should be sent
                        if old_event.status == 'new':
                            event.status = 'new'
                        session.delete(old_event)
            else:
                event.status = 'new'

            # over ride new status if scenario
            if scenario is True:
                event.status = 'scenario'

            # Fill the rest of the event info
            event.directory_name = os.path.join(self.data_dir, event.event_id)
            event.title = self.earthquakes[eq_id]['properties']['title']
            event.place = self.earthquakes[eq_id]['properties']['place']
            # feed time is in milliseconds; store seconds
            event.time = self.earthquakes[eq_id]['properties']['time'] / 1000.0
            event.magnitude = eq['properties']['mag']
            event_coords = self.earthquakes[eq_id]['geometry']['coordinates']
            event.lon = event_coords[0]
            event.lat = event_coords[1]
            event.depth = event_coords[2]

            if old_shakemaps:
                event.shakemaps = old_shakemaps
            if old_notifications:
                event.notifications = old_notifications

            session.add(event)
            session.commit()

            self.get_event_map(event)

            # add the event to the return list and add info to the
            # return string
            new_events += [event]
            event_str += 'Event: %s\n' % event.event_id

        Session.remove()
        # BUGFIX: 'print event_str' is Python 2 statement syntax and a
        # SyntaxError on Python 3; the call form behaves the same on both
        print(event_str)
        return new_events, event_str
Exemplo n.º 8
0
    def get_new_events(self, session=None, scenario=False):
        """
        Checks the json feed for new earthquakes

        Walks ``self.earthquakes``, skips events from ignored networks or
        below the configured magnitude cutoff, dedupes against events
        already in the database (deleting older rows and carrying their
        shakemaps/notifications over), and commits each event that either
        falls inside a group or is a scenario run.

        Args:
            session: database session used for queries, deletes, commits
            scenario (bool): if True, the event id gets a '_scenario'
                suffix, type is 'scenario', and the event is always kept

        Returns:
            tuple (new_events, event_str): saved Event rows and a
            human-readable summary string, one 'Event: <id>' line each
        """
        sc = SC()

        event_str = ''
        new_events = []
        for eq_id in self.earthquakes.keys():
            eq = self.earthquakes[eq_id]

            # ignore info from unfavorable networks and low mag eqs
            if (eq['properties']['net'] in self.ignore_nets
                    or eq['properties']['mag'] < sc.new_eq_mag_cutoff):
                continue

            # get event id and all ids
            event = Event()
            event.all_event_ids = eq['properties']['ids']
            if scenario is False:
                event.event_id = eq_id
            else:
                event.event_id = eq_id + '_scenario'
                event.all_event_ids = event.event_id

            # use id and all ids to determine if the event is new and
            # query the old event if necessary
            old_shakemaps = []
            old_notifications = []
            if event.is_new() is False:
                event.status = 'processed'
                # all_event_ids is comma-delimited; strip stray commas first
                ids = event.all_event_ids.strip(',').split(',')
                old_events = [(session.query(Event).filter(
                    Event.event_id == each_id).first()) for each_id in ids]

                # remove older events
                for old_event in old_events:
                    if old_event is not None:
                        # carry associations over to the replacement event
                        old_notifications += old_event.notifications
                        old_shakemaps += old_event.shakemaps

                        # if one of these old events hasn't had
                        # notifications sent, this event should be sent
                        if old_event.status == 'new':
                            event.status = 'new'
                        session.delete(old_event)
            else:
                event.status = 'new'

            # Fill the rest of the event info
            event.title = self.earthquakes[eq_id]['properties']['title']
            event.place = self.earthquakes[eq_id]['properties']['place']
            # feed time is in milliseconds; store seconds
            event.time = self.earthquakes[eq_id]['properties']['time'] / 1000.0
            event.magnitude = eq['properties']['mag']
            event_coords = self.earthquakes[eq_id]['geometry']['coordinates']
            event.lon = event_coords[0]
            event.lat = event_coords[1]
            event.depth = event_coords[2]
            event.type = 'scenario' if scenario is True else 'event'

            # determine whether or not an event should be kept
            # based on group definitions. Should always be true for scenario runs
            keep_event = scenario
            groups = session.query(Group).all()
            if len(groups) > 0:
                for group in groups:
                    if group.point_inside(event):
                        keep_event = True
            else:
                # no groups configured at all -- keep everything
                keep_event = True

            if keep_event is False:
                continue

            if old_shakemaps:
                event.shakemaps = old_shakemaps
            if old_notifications:
                event.notifications = old_notifications

            session.add(event)
            session.commit()

            self.get_event_map(event)

            # add the event to the return list and add info to the
            # return string
            new_events += [event]
            event_str += 'Event: %s\n' % event.event_id

        # print event_str
        return new_events, event_str
Exemplo n.º 9
0
def grab_from_directory(directory, session=None):
    """
    Import an event and shakemap from a local directory

    There are no naming convensions for the directory, but it must
    contain an info.json and grid.xml shakemap product. Event and
    ShakeMap database entries will be created as Scenario products
    and the event can be triggered through the UI as a scenario

    Args:
        directory: path containing info.json and grid.xml
        session: database session used to add/commit the new rows

    Returns:
        tuple (event, shakemap): the committed Event and ShakeMap rows

    Raises:
        Exception: whatever went wrong while reading/parsing the
        directory contents (re-raised after being recorded).
    """
    info_loc = os.path.join(directory, 'info.json')

    error = ''
    log = ''
    try:
        with open(info_loc, 'r') as info_file:
            info = json.loads(info_file.read())

        # Load shakemap grid to get extra info
        grid_loc = os.path.join(directory, 'grid.xml')
        grid = ShakeMapGrid()
        grid.load(grid_loc)

    except Exception as e:
        error = str(e)
        log = error
        # bare raise re-raises with the original traceback intact
        # (was 'raise (e)', which appends a redundant frame)
        raise

    event_info = info['input']['event_information']

    # make timestamp
    try:
        # NOTE(review): '%Y-%d-%mT...' puts day before month -- looks odd
        # but matches the sibling implementation; confirm the feed format
        dt = datetime.datetime.strptime(event_info['origin_time'],
                                        '%Y-%d-%mT%H:%M:%SZ')
        timestamp = time.mktime(dt.timetuple())
    except Exception:
        # can't parse the timestamp... just use current time
        timestamp = time.time()

    event = Event(status='scenario',
                  event_id=grid.event_id,
                  title='M {} - {}'.format(event_info['magnitude'],
                                           event_info['location']),
                  place=event_info['location'],
                  time=timestamp,
                  magnitude=event_info['magnitude'],
                  lon=event_info['longitude'],
                  lat=event_info['latitude'],
                  depth=event_info['depth'],
                  override_directory=directory,
                  type='scenario',
                  save=True)

    session.add(event)

    proc = info['processing']
    shakemap = ShakeMap(
        status='scenario',
        event=event,
        shakemap_id=grid.event_id,
        lat_min=grid.lat_min,
        lat_max=grid.lat_max,
        lon_min=grid.lon_min,
        lon_max=grid.lon_max,
        generation_timestamp=proc['shakemap_versions']['process_time'],
        recieve_timestamp=time.time(),
        override_directory=directory,
        shakemap_version=proc['shakemap_versions']['map_version'],
        type='scenario',
        save=True)

    session.add(shakemap)
    session.commit()

    return event, shakemap