Code example #1
def _attempt_scrape(semester_uq_id, course_code):
    try:
        semester = Session().query(
            models.Semester
        ).filter(
            models.Semester.uq_id == semester_uq_id
        ).one()
    except NoResultFound:
        raise APINotFoundException("Semester doesn't exist")

    # Don't scrape if we've already got it
    try:
        offering = Session().query(
            models.Offering
        ).join(
            models.Course,
            models.Offering.course_id == models.Course.id,
        ).filter(
            models.Course.course_code == course_code,
            models.Offering.semester_id == semester.id,
        ).one()
        # If we got to here, the offering has already been scraped and we should abort
        raise APIFailureException("Offering has already been scraped")
    except NoResultFound:
        # this is what we want
        pass

    # Actual scrape
    assessment_items = scraper.scrape_assessment(semester.semester, course_code)

    try:
        course = Session().query(
            models.Course
        ).filter(
            models.Course.course_code == course_code
        ).one()
    except NoResultFound:
        course = models.Course(course_code)
        Session().add(course)
        Session().commit()

    session = Session()

    offering = models.Offering(
        course_id=course.id,
        semester_id=semester.id,
    )
    session.add(offering)
    session.flush()

    # Add assessment items
    for item in assessment_items:
        session.add(models.AssessmentItem(
            offering_id=offering.id,
            task_name=item['task'],
            weight=item['weight'],
        ))
    session.commit()

    return offering
Code example #2
    def make_heartbeat(self):
        '''
        Make an Event row that will only trigger a notification for
        groups with a heartbeat group_specification
        '''
        session = Session()
        last_hb = session.query(Event).filter(
            Event.event_id == 'heartbeat').all()
        make_hb = False
        if last_hb:
            if time.time() > (last_hb[-1].time) + 24 * 60 * 60:
                make_hb = True
        else:
            make_hb = True

        if make_hb is True:
            e = Event()
            e.time = time.time()
            e.event_id = 'heartbeat'
            e.magnitude = 10
            e.lat = 1000
            e.lon = 1000
            e.title = 'ShakeCast Heartbeat'
            e.place = 'ShakeCast is running'
            e.status = 'new'
            e.directory_name = os.path.join(self.data_dir, e.event_id)
            session.add(e)
            session.commit()

            self.get_event_map(e)

        Session.remove()
Code example #3
File: model.py  Project: sfioritto/bigbrother
def get_whorl_identities(whorls, identity):

    db = Session()

    wis = []
    for whorl in whorls:
        try:
            wi = db.query(WhorlIdentity).filter_by(whorl_hashed=whorl.hashed).filter_by(identity_id=identity.id).one()

        except NoResultFound:
            wi = WhorlIdentity(whorl_hashed=whorl.hashed, identity_id=identity.id)
            db.add(wi)
            db.flush()
        wis.append(wi)

    return wis
Code example #4
File: model.py  Project: sfioritto/bigbrother
def create_get_whorls(rawdata):

    whorls = []
    db = Session()

    for key, value, hashed in create_hashes(rawdata):
        try:
            whorl = db.query(Whorl).filter_by(hashed=hashed).one()

        except NoResultFound:
            whorl = Whorl(hashed=hashed, key=key, value=value)
            db.add(whorl)
            db.flush()

        whorls.append(whorl)

    return whorls
Code example #5
File: main.py  Project: nonsene/training-path
web_page = requests.get(url)
web_page_json = web_page.json()

status = web_page.status_code

query = session.query(User).filter(User.username == username)

if status == 200:
    if query.first() is None:
        print('\n\nUser does not exist... Creating a new one!\n\n')
        sleep(2)
        for i in web_page_json:
            print(i['name'])
            insert_ = User(username, i['name'], date.today())
            session.add(insert_)
        print('\nAdding these repositories ^')
        sleep(2)
        session.commit()
    else:
        print(
            "\n\nUser already exists... Updating the old entries if necessary!\n\n"
        )
        sleep(2)
        query.delete()
        for i in web_page_json:
            print(i['name'])
            insert_ = User(username, i['name'], date.today())
            session.add(insert_)
        print('\nAdding these repositories ^')
        session.commit()
Code example #6
    def get_new_events(self, scenario=False):
        """
        Checks the json feed for new earthquakes
        """
        session = Session()
        sc = SC()

        event_str = ''
        new_events = []
        for eq_id in self.earthquakes.keys():
            eq = self.earthquakes[eq_id]

            # ignore info from unfavorable networks and low mag eqs
            if (eq['properties']['net'] in self.ignore_nets
                    or eq['properties']['mag'] < sc.new_eq_mag_cutoff):
                continue

            # get event id and all ids
            event = Event()
            event.all_event_ids = eq['properties']['ids']
            if scenario is False:
                event.event_id = eq_id
            else:
                event.event_id = eq_id + '_scenario'
                event.all_event_ids = event.event_id

            # use id and all ids to determine if the event is new and
            # query the old event if necessary
            old_shakemaps = []
            old_notifications = []
            if event.is_new() is False:
                event.status = 'processed'
                ids = event.all_event_ids.strip(',').split(',')
                old_events = [(session.query(Event).filter(
                    Event.event_id == each_id).first()) for each_id in ids]

                # remove older events
                for old_event in old_events:
                    if old_event is not None:
                        old_notifications += old_event.notifications
                        old_shakemaps += old_event.shakemaps

                        # if one of these old events hasn't had
                        # notifications sent, this event should be sent
                        if old_event.status == 'new':
                            event.status = 'new'
                        session.delete(old_event)
            else:
                event.status = 'new'

            # override the 'new' status if this is a scenario
            if scenario is True:
                event.status = 'scenario'

            # Fill the rest of the event info
            event.directory_name = os.path.join(self.data_dir, event.event_id)
            event.title = self.earthquakes[eq_id]['properties']['title']
            event.place = self.earthquakes[eq_id]['properties']['place']
            event.time = self.earthquakes[eq_id]['properties']['time'] / 1000.0
            event.magnitude = eq['properties']['mag']
            event_coords = self.earthquakes[eq_id]['geometry']['coordinates']
            event.lon = event_coords[0]
            event.lat = event_coords[1]
            event.depth = event_coords[2]

            if old_shakemaps:
                event.shakemaps = old_shakemaps
            if old_notifications:
                event.notifications = old_notifications

            session.add(event)
            session.commit()

            self.get_event_map(event)

            # add the event to the return list and add info to the
            # return string
            new_events += [event]
            event_str += 'Event: %s\n' % event.event_id

        Session.remove()
        print(event_str)
        return new_events, event_str
Code example #7
    def get_new_shakemaps(self, scenario=False):
        """
        Checks the json feed for new shakemaps
        """
        session = Session()
        url_opener = URLOpener()

        shakemap_str = ''
        new_shakemaps = []
        for eq_id in self.earthquakes.keys():
            eq = self.earthquakes[eq_id]

            if scenario is False:
                eq_url = eq['properties']['detail']
                try:
                    eq_str = url_opener.open(eq_url)
                except Exception:
                    self.log += 'Bad EQ URL: {0}'.format(eq_id)
                    continue
                try:
                    eq_info = json.loads(eq_str)
                except Exception as e:
                    eq_info = e.partial
            else:
                eq_info = eq

            # check if the event has a shakemap
            if ('shakemap' not in eq_info['properties']['products'].keys()
                    and 'shakemap-scenario'
                    not in eq_info['properties']['products'].keys()):
                continue

            # pulls the first shakemap associated with the event
            shakemap = ShakeMap()

            if scenario is False:
                shakemap.shakemap_id = eq_id
            else:
                shakemap.shakemap_id = eq_id + '_scenario'

            if 'shakemap-scenario' in eq_info['properties']['products'].keys():
                sm_str = 'shakemap-scenario'
            else:
                sm_str = 'shakemap'

            # which shakemap has the highest weight
            weight = 0
            for idx in range(len(eq_info['properties']['products'][sm_str])):
                if eq_info['properties']['products'][sm_str][idx][
                        'preferredWeight'] > weight:
                    weight = eq_info['properties']['products'][sm_str][idx][
                        'preferredWeight']
                    shakemap_json = eq_info['properties']['products'][sm_str][
                        idx]

            shakemap.shakemap_version = shakemap_json['properties']['version']

            # check if we already have the shakemap
            if shakemap.is_new() is False:
                shakemap = (session.query(ShakeMap).filter(
                    ShakeMap.shakemap_id == shakemap.shakemap_id).filter(
                        ShakeMap.shakemap_version ==
                        shakemap.shakemap_version).first())

            # Check for new shakemaps without statuses; give them a
            # status so we know what to do with them later
            if shakemap.status is None:
                shakemap.status = 'downloading'
            session.add(shakemap)
            session.commit()

            # deprecate previous unprocessed versions of the ShakeMap
            dep_shakemaps = (session.query(ShakeMap).filter(
                ShakeMap.shakemap_id == shakemap.shakemap_id).filter(
                    ShakeMap.status == 'new')).all()
            for dep_shakemap in dep_shakemaps:
                dep_shakemap.status = 'depricated'

            # assign relevant information to the shakemap
            shakemap.map_status = shakemap_json['properties']['map-status']
            shakemap.region = shakemap_json['properties']['eventsource']
            shakemap.lat_max = shakemap_json['properties']['maximum-latitude']
            shakemap.lat_min = shakemap_json['properties']['minimum-latitude']
            shakemap.lon_max = shakemap_json['properties']['maximum-longitude']
            shakemap.lon_min = shakemap_json['properties']['minimum-longitude']
            shakemap.generation_timestamp = shakemap_json['properties'][
                'process-timestamp']
            shakemap.recieve_timestamp = time.time()

            # make a directory for the new event
            shakemap.directory_name = os.path.join(
                self.data_dir, shakemap.shakemap_id,
                shakemap.shakemap_id + '-' + str(shakemap.shakemap_version))
            if not os.path.exists(shakemap.directory_name):
                os.makedirs(shakemap.directory_name)

            # Try to download all preferred products
            for product_name in self.pref_products:
                # if we already have a good version of this product
                # just skip it
                if shakemap.has_products([product_name]):
                    continue

                existing_prod = (session.query(Product).filter(
                    Product.shakemap_id == shakemap.shakecast_id).filter(
                        Product.product_type == product_name)).all()

                if existing_prod:
                    product = existing_prod[0]
                else:
                    product = Product(shakemap=shakemap,
                                      product_type=product_name)

                try:
                    product.json = shakemap_json['contents']['download/%s' %
                                                             product_name]
                    product.url = product.json['url']

                    # download and allow partial products
                    product.str_ = url_opener.open(product.url)

                    # determine if we're writing binary or not
                    if product_name.lower().endswith(
                        ('.png', '.jpg', '.jpeg')):
                        mode = 'wb'
                    else:
                        mode = 'wt'

                    product.file_ = open(
                        '%s%s%s' %
                        (shakemap.directory_name, self.delim, product_name),
                        mode)
                    product.file_.write(product.str_)
                    product.file_.close()

                    product.error = None
                    product.status = 'downloaded'
                except Exception as e:
                    product.status = 'download failed'
                    product.error = '{}: {}'.format(type(e), e)
                    self.log += 'Failed to download: %s %s' % (eq_id,
                                                               product_name)

            # check for event whose id or one of its old ids matches the shakemap id
            if scenario is False:
                event = session.query(Event).filter(
                    Event.all_event_ids.contains(shakemap.shakemap_id)).all()
            else:
                event = session.query(Event).filter(
                    Event.event_id == shakemap.shakemap_id).all()

            if event:
                event = event[0]
                event.shakemaps.append(shakemap)

            if (scenario is False and shakemap.has_products(self.req_products)
                    and shakemap.status == 'downloading'):
                shakemap.status = 'new'
            elif scenario is True:
                shakemap.status = 'scenario'

            session.commit()

            new_shakemaps += [shakemap]
            shakemap_str += 'Wrote %s to disk.\n' % shakemap.shakemap_id

        self.log += shakemap_str
        Session.remove()
        return new_shakemaps, shakemap_str
Code example #8
File: model.py  Project: sfioritto/bigbrother
def create_identity(name):
    db = Session()
    identity = Identity(name=name)
    db.add(identity)
    db.flush()
    return identity
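
All of the snippets above rely on a module-level `Session` factory defined elsewhere in each project. As a point of reference, the sketch below shows the usual SQLAlchemy setup that makes this pattern work, a scoped_session wrapping a sessionmaker, which is why `Session()` can be called repeatedly to obtain the same thread-local session and `Session.remove()` discards it when the unit of work ends. The engine URL and the model definitions are placeholders; the exact configuration in these projects may differ.

# Minimal setup sketch (not taken from any of the projects above).
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker

engine = create_engine('sqlite:///example.db')  # placeholder database URL
Session = scoped_session(sessionmaker(bind=engine))

# Typical usage pattern seen in the examples above:
session = Session()   # returns the current thread-local session
# ... session.query(...), session.add(...), session.commit() ...
Session.remove()      # dispose of the session once the task is finished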