Example #1
def get_offering(semester_uq_id, course_code):
    try:
        offering = Session().query(
            models.Offering
        ).join(
            models.Semester,
            sa.and_(
                models.Offering.semester_id == models.Semester.id,
                models.Semester.uq_id == semester_uq_id,
            )
        ).join(
            models.Course,
            sa.and_(
                models.Offering.course_id == models.Course.id,
                models.Course.course_code == course_code,
            )
        ).one()

        return offering.serialise()
    except NoResultFound:
        # Didn't find anything existing, try scraping it
        try:
            offering = _attempt_scrape(semester_uq_id, course_code)
            return offering.serialise()
        except (scraper.ScraperException, APIException):
            # Scraping failed, probably just a bad request
            raise APINotFoundException("No offering for that course code and semester")
Example #2
    def POST(self):

        partial = json.loads(web.data())
        rawdata = model.build_raw_data(partial, web.ctx.environ, web.ctx.ip)
        identity = model.create_user(partial["name"])
        whorls = model.create_get_whorls(rawdata)
        model.learn(whorls, identity)
        Session.commit()

        return ""
Example #3
    def make_heartbeat(self):
        '''
        Make an Event row that will only trigger a notification for
        groups with a heartbeat group_specification
        '''
        session = Session()
        last_hb = session.query(Event).filter(
            Event.event_id == 'heartbeat').all()
        make_hb = False
        if last_hb:
            if time.time() > (last_hb[-1].time) + 24 * 60 * 60:
                make_hb = True
        else:
            make_hb = True

        if make_hb:
            e = Event()
            e.time = time.time()
            e.event_id = 'heartbeat'
            e.magnitude = 10
            e.lat = 1000
            e.lon = 1000
            e.title = 'ShakeCast Heartbeat'
            e.place = 'ShakeCast is running'
            e.status = 'new'
            e.directory_name = os.path.join(self.data_dir, e.event_id)
            session.add(e)
            session.commit()

            self.get_event_map(e)

        Session.remove()
Example #4
def get_tweets(user_id, created_at, offset=0, limit=100):
    with Session() as session:
        query = session.query(Tweet).\
            filter(Tweet.deleted_at == None).\
            filter(Tweet.user_id == user_id).\
            filter(Tweet.created_at > created_at)
        for tweet in query.limit(limit).offset(offset).yield_per(10):
            yield tweet
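
A minimal consumption sketch for the generator above (the IDs and the time window are hypothetical); yield_per(10) streams rows from the database cursor in batches of ten instead of materialising the whole result set:

from datetime import datetime, timedelta, timezone

# Hypothetical usage: stream one user's tweets from the last week.
since = datetime.now(timezone.utc) - timedelta(days=7)
for tweet in get_tweets(user_id=42, created_at=since, limit=50):
    print(tweet.id, tweet.content)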
Example #5
def unfollow(follower_id, followee_id):
    with Session() as session:
        follow = session.query(Follow).\
            filter(Follow.follower_id == follower_id).\
            filter(Follow.followee_id == followee_id).one_or_none()
        if follow:
            follow.deleted_at = datetime.now(timezone.utc)
            session.commit()
Example #6
def delete(user_id, tweet_id):
    """Delete a tweet"""
    with Session() as session:
        now = datetime.now(timezone.utc)
        tweet = session.query(Tweet). \
            filter(Tweet.id == tweet_id). \
            filter(Tweet.user_id == user_id).one()
        tweet.deleted_at = now
        session.commit()
Example #7
def login(email, password):
    with Session() as session:
        user = session.query(User).filter(User.email == email).one()
        b_hashed = bytes(user.password, 'utf-8')
        b_password = bytes(password, 'utf-8')
        if bcrypt.checkpw(b_password, b_hashed):
            return make_token(user)
        else:
            raise ValueError('Invalid password')
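
For context, a sketch of how the stored user.password hash could be produced with the same bcrypt library (the helper is hypothetical, not from the source):

import bcrypt

def hash_password(password):
    # bcrypt embeds the salt in the hash it returns, which is why
    # checkpw() above can verify against the stored value alone.
    return bcrypt.hashpw(bytes(password, 'utf-8'), bcrypt.gensalt()).decode('utf-8')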
Example #8
    def send_update_notification(update_info=None):
        '''
        Create notification to alert admin of software updates
        '''
        try:
            not_builder = NotificationBuilder()
            html = not_builder.build_update_html(update_info=update_info)

            #initiate message
            msg = MIMEMultipart()
            msg_html = MIMEText(html, 'html')
            msg.attach(msg_html)

            # find the ShakeCast logo
            logo_str = os.path.join(sc_dir(), 'view', 'static', 'sc_logo.png')

            # open logo and attach it to the message
            with open(logo_str, 'rb') as logo_file:
                msg_image = MIMEImage(logo_file.read())
            msg_image.add_header('Content-ID', '<sc_logo>')
            msg_image.add_header('Content-Disposition', 'inline')
            msg.attach(msg_image)

            mailer = Mailer()
            me = mailer.me

            # get admin with emails
            session = Session()
            admin = session.query(User).filter(
                User.user_type.like('admin')).filter(User.email != '').all()
            emails = [a.email for a in admin]

            msg['Subject'] = 'ShakeCast Software Update'
            msg['To'] = ', '.join(emails)
            msg['From'] = me

            if len(emails) > 0:
                mailer.send(msg=msg, you=emails)

        except Exception:
            return False

        return True
Example #9
    def GetUser(self, request, context):
        """Returns db user object for the input user ID"""
        user_id = request.user_id
        try:
            with Session() as session:
                user = session.query(User).filter(User.id == user_id).one()
                return UserReply(user=to_pb_user(user))
        except NoResultFound:
            context.set_code(grpc.StatusCode.NOT_FOUND)
            context.set_details(f"Unable to find user with ID {user_id}")
Example #10
def learn(whorls, identity):
    """
    Increment the counts for each whorl, each whorl-identity pair,
    total_visits, and the identity itself.
    """
    db = Session()
    identity.count = identity.count + 1
    total_visits = db.query(Stat).filter_by(key="total_visits").one()
    total_visits.value = total_visits.value + 1

    for whorl in whorls:
        whorl.count = whorl.count + 1

    # each whorl-identity pair is incremented once and we commit once,
    # so this loop must not be nested inside the whorl loop above
    for wi in get_whorl_identities(whorls, identity):
        wi.count = wi.count + 1

    db.commit()
Example #11
def get_semesters():
    semesters = Session().query(
        models.Semester
    ).all()

    return {
        'semesters': [
            semester.serialise()
            for semester in semesters
        ]
    }
Example #12
def get_whorls(rawdata):
    db = Session()
    hashes = [hashed for key, value, hashed in create_hashes(rawdata)]
    whorls = db.query(Whorl).filter(Whorl.hashed.in_(hashes)).all()

    # TODO: if the number of users grows large, we need to limit
    # the whorls we consider, because otherwise the set of users we need
    # to consider gets too large, and the memory and computing requirements
    # will grow too quickly. So we could do something like this:
    #
    # order_by(asc(Whorl.count)).\
    # limit(top)
    #
    # this only looks at rare whorls. This may not be the best solution. When the data
    # is sparse, if a player switches browsers there is very little or no overlap with
    # the whorls generated by the previous browser with this method.

    return whorls
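
A sketch of the rare-whorl limit described in the TODO above; top is a hypothetical tuning parameter and asc comes from sqlalchemy:

from sqlalchemy import asc

def get_rarest_whorls(db, hashes, top=50):
    # Consider only the `top` least-common whorls, keeping the candidate
    # identity set small as the user base grows.
    return db.query(Whorl).\
        filter(Whorl.hashed.in_(hashes)).\
        order_by(asc(Whorl.count)).\
        limit(top).all()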
Example #13
def identify_from(whorls):

    db = Session()
    stats = stats_obj(db)
    minprob = 1.0 / stats["total_visits"]
    whorl_hashes = list({whorl.hashed for whorl in whorls})

    # this is a dictionary of dictionaries. The inner dictionaries
    # contain probabilities of the whorl given the user.
    whorlids = defaultdict(lambda: defaultdict(lambda: minprob))
    for wid in db.query(WhorlIdentity).filter(WhorlIdentity.whorl_hashed.in_(whorl_hashes)).all():

        whorlids[wid.identity][wid.whorl_hashed] = min(1, float(wid.count) / wid.identity.count)

    # The probabilities above are then used to create a list
    # of probabilities per user for every whorl passed in.
    # The inner dictionary above defaults to a reasonable
    # minimum if we've never seen a whorl for a given user
    givenid = defaultdict(list)
    for identity, idprobs in whorlids.items():
        for whorl in whorls:
            givenid[identity].append(idprobs[whorl.hashed])

    # These are all the probabilities put into a list of tuples so
    # it can be sorted by probability.
    probs = [
        (
            # calculate the posterior probability p(whorl|identity)p(identity)
            reduce(mul, idprobs) * (float(identity.count) / stats["total_visits"]),
            # identity id as a tie breaker in sorting. this is arbitrary. If there
            # is a tie, we just guess. could put a random number here I suppose.
            identity.id,
            # the identity tied to this probability.
            identity,
        )
        for identity, idprobs in givenid.items()
    ]

    probs.sort()
    return probs[-1][2]  # the most likely identity (third element is the identity)
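
To make the sort key concrete, a worked example with purely hypothetical numbers: an identity seen on 3 of 10 total visits, matching the first of two incoming whorls on 2 of those visits and the second never.

minprob = 1 / 10               # default for unseen whorls (1 / total_visits)
p_a = min(1, 2 / 3)            # wid.count / identity.count for whorl A
p_b = minprob                  # whorl B never seen with this identity
prior = 3 / 10                 # identity.count / total_visits
posterior = p_a * p_b * prior  # 0.02, the first element of the probs tuple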
Example #14
def follow(follower_id, followee_id):
    with Session() as session:
        follow = session.query(Follow). \
            filter(Follow.follower_id == follower_id). \
            filter(Follow.followee_id == followee_id).one_or_none()
        if follow:
            if follow.deleted_at:
                follow.deleted_at = None
                session.commit()
        else:
            follow = Follow(followee_id=followee_id, follower_id=follower_id)
            session.add(follow)
            session.commit()
Example #15
def create(password, email, phone, name, latitude=0, longtitude=0):
    hashed_password, salt = make_password(password)
    with Session() as session:
        new_user = User(email=email,
                        password=hashed_password,
                        salt=salt,
                        phone=phone,
                        name=name,
                        latitude=latitude,
                        longtitude=longtitude)
        session.add(new_user)
        session.commit()
        token = make_token(new_user)
        return token
Example #16
def get_whorl_identities(whorls, identity):
    db = Session()

    wis = []
    for whorl in whorls:
        try:
            wi = db.query(WhorlIdentity).\
                filter_by(whorl_hashed=whorl.hashed).\
                filter_by(identity_id=identity.id).one()
        except NoResultFound:
            wi = WhorlIdentity(whorl_hashed=whorl.hashed,
                               identity_id=identity.id)
            db.add(wi)
            db.flush()
        wis.append(wi)

    return wis
Example #17
    def post(user_id, content, latitude, longtitude):
        """Post a tweet"""
        if len(content) > 300:
            raise ValueError(
                f"Content length is greater than 300. Got {len(content)}")

        with Session() as session:
            # we need to set created_at here instead of using the default because
            # expunge was called before created_at was set during commit
            new_tweet = Tweet(user_id=user_id,
                              content=content,
                              latitude=latitude,
                              longtitude=longtitude,
                              created_at=datetime.now(timezone.utc))
            session.add(new_tweet)
            # we need to duplicate another one and detach it from the session
            # in order to use it outside of session
            # https://docs.sqlalchemy.org/en/latest/orm/session_state_management.html#merge-tips
            merged_tweet = session.merge(new_tweet)
            session.expunge(merged_tweet)
            session.commit()
            return merged_tweet
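
A hypothetical caller, to show why the merge/expunge dance matters: the returned instance is detached, so its attributes remain readable after the session has closed.

tweet = post(user_id=42, content="hello", latitude=0, longtitude=0)
# The detached copy keeps the values it carried at merge time (content,
# the explicitly set created_at), so no lazy load against a closed
# session is triggered here.
print(tweet.content, tweet.created_at)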
Example #18
def create_get_whorls(rawdata):

    whorls = []
    db = Session()

    for key, value, hashed in create_hashes(rawdata):
        try:
            whorl = db.query(Whorl).filter_by(hashed=hashed).one()

        except NoResultFound:
            whorl = Whorl(hashed=hashed, key=key, value=value)
            db.add(whorl)
            db.flush()

        whorls.append(whorl)

    return whorls
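
Examples #16 and #18 are two instances of the same get-or-create idiom; a generic sketch of it (the helper is hypothetical, not from either codebase):

from sqlalchemy.orm.exc import NoResultFound

def get_or_create(db, model, **kwargs):
    # Query by the given columns; on a miss, create the row and flush()
    # so it gets a primary key without ending the transaction.
    try:
        return db.query(model).filter_by(**kwargs).one()
    except NoResultFound:
        obj = model(**kwargs)
        db.add(obj)
        db.flush()
        return obj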
Example #19
    def get_new_events(self, scenario=False):
        """
        Checks the json feed for new earthquakes
        """
        session = Session()
        sc = SC()

        event_str = ''
        new_events = []
        for eq_id in self.earthquakes.keys():
            eq = self.earthquakes[eq_id]

            # ignore info from unfavorable networks and low mag eqs
            if (eq['properties']['net'] in self.ignore_nets
                    or eq['properties']['mag'] < sc.new_eq_mag_cutoff):
                continue

            # get event id and all ids
            event = Event()
            event.all_event_ids = eq['properties']['ids']
            if scenario is False:
                event.event_id = eq_id
            else:
                event.event_id = eq_id + '_scenario'
                event.all_event_ids = event.event_id

            # use id and all ids to determine if the event is new and
            # query the old event if necessary
            old_shakemaps = []
            old_notifications = []
            if event.is_new() is False:
                event.status = 'processed'
                ids = event.all_event_ids.strip(',').split(',')
                old_events = [(session.query(Event).filter(
                    Event.event_id == each_id).first()) for each_id in ids]

                # remove older events
                for old_event in old_events:
                    if old_event is not None:
                        old_notifications += old_event.notifications
                        old_shakemaps += old_event.shakemaps

                        # if one of these old events hasn't had
                        # notifications sent, this event should be sent
                        if old_event.status == 'new':
                            event.status = 'new'
                        session.delete(old_event)
            else:
                event.status = 'new'

            # override new status if scenario
            if scenario is True:
                event.status = 'scenario'

            # Fill the rest of the event info
            event.directory_name = os.path.join(self.data_dir, event.event_id)
            event.title = self.earthquakes[eq_id]['properties']['title']
            event.place = self.earthquakes[eq_id]['properties']['place']
            event.time = self.earthquakes[eq_id]['properties']['time'] / 1000.0
            event.magnitude = eq['properties']['mag']
            event_coords = self.earthquakes[eq_id]['geometry']['coordinates']
            event.lon = event_coords[0]
            event.lat = event_coords[1]
            event.depth = event_coords[2]

            if old_shakemaps:
                event.shakemaps = old_shakemaps
            if old_notifications:
                event.notifications = old_notifications

            session.add(event)
            session.commit()

            self.get_event_map(event)

            # add the event to the return list and add info to the
            # return string
            new_events += [event]
            event_str += 'Event: %s\n' % event.event_id

        Session.remove()
        print(event_str)
        return new_events, event_str
Example #20
def _attempt_scrape(semester_uq_id, course_code):
    try:
        semester = Session().query(
            models.Semester
        ).filter(
            models.Semester.uq_id == semester_uq_id
        ).one()
    except NoResultFound:
        raise APINotFoundException("Semester doesn't exist")

    # Don't scrape if we've already got it
    try:
        offering = Session().query(
            models.Offering
        ).join(
            models.Course,
            models.Offering.course_id == models.Course.id,
        ).filter(
            models.Course.course_code == course_code,
            models.Offering.semester_id == semester.id,
        ).one()
        # If we got to here, the offering has already been scraped and we should abort
        raise APIFailureException("Offering has already been scraped")
    except NoResultFound:
        # this is what we want
        pass

    # Actual scrape
    assessment_items = scraper.scrape_assessment(semester.semester, course_code)

    try:
        course = Session().query(
            models.Course
        ).filter(
            models.Course.course_code == course_code
        ).one()
    except NoResultFound:
        course = models.Course(course_code)
        Session().add(course)
        Session().commit()

    session = Session()

    offering = models.Offering(
        course_id=course.id,
        semester_id=semester.id,
    )
    session.add(offering)
    session.flush()

    # Add assessment items
    for item in assessment_items:
        session.add(models.AssessmentItem(
            offering_id=offering.id,
            task_name=item['task'],
            weight=item['weight'],
        ))
    session.commit()

    return offering
Example #21
def create_identity(name):
    db = Session()
    identity = Identity(name=name)
    db.add(identity)
    db.flush()
    return identity
Example #22
    def get_assessment_items(self):
        return Session().query(AssessmentItem).filter(
            AssessmentItem.offering_id == self.id).all()
Example #23
    def get_current_semester(cls):
        return Session().query(cls).filter(cls.is_current == True).one()
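
In SQLAlchemy the `== True` comparison is intentional (it builds a SQL boolean expression, not a Python one), though E712-style linters flag it; `.is_(True)` is the equivalent, linter-friendly spelling:

    def get_current_semester(cls):
        # Same query expressed with .is_(), which linters accept.
        return Session().query(cls).filter(cls.is_current.is_(True)).one()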
Example #24
    def get_new_shakemaps(self, scenario=False):
        """
        Checks the json feed for new earthquakes
        """
        session = Session()
        url_opener = URLOpener()

        shakemap_str = ''
        new_shakemaps = []
        for eq_id in self.earthquakes.keys():
            eq = self.earthquakes[eq_id]

            if scenario is False:
                eq_url = eq['properties']['detail']
                try:
                    eq_str = url_opener.open(eq_url)
                except Exception:
                    self.log += 'Bad EQ URL: {0}'.format(eq_id)
                    continue
                try:
                    eq_info = json.loads(eq_str)
                except ValueError:
                    # bad or truncated JSON; skip this event
                    self.log += 'Bad EQ JSON: {0}'.format(eq_id)
                    continue
            else:
                eq_info = eq

            # check if the event has a shakemap
            if ('shakemap' not in eq_info['properties']['products'].keys()
                    and 'shakemap-scenario'
                    not in eq_info['properties']['products'].keys()):
                continue

            # pulls the first shakemap associated with the event
            shakemap = ShakeMap()

            if scenario is False:
                shakemap.shakemap_id = eq_id
            else:
                shakemap.shakemap_id = eq_id + '_scenario'

            if 'shakemap-scenario' in eq_info['properties']['products'].keys():
                sm_str = 'shakemap-scenario'
            else:
                sm_str = 'shakemap'

            # which shakemap has the highest weight
            weight = 0
            for idx in range(len(eq_info['properties']['products'][sm_str])):
                if eq_info['properties']['products'][sm_str][idx][
                        'preferredWeight'] > weight:
                    weight = eq_info['properties']['products'][sm_str][idx][
                        'preferredWeight']
                    shakemap_json = eq_info['properties']['products'][sm_str][
                        idx]

            shakemap.shakemap_version = shakemap_json['properties']['version']

            # check if we already have the shakemap
            if shakemap.is_new() is False:
                shakemap = (session.query(ShakeMap).filter(
                    ShakeMap.shakemap_id == shakemap.shakemap_id).filter(
                        ShakeMap.shakemap_version ==
                        shakemap.shakemap_version).first())

            # Check for new shakemaps without statuses; give them a
            # status so we know what to do with them later
            if shakemap.status is None:
                shakemap.status = 'downloading'
            session.add(shakemap)
            session.commit()

            # deprecate previous unprocessed versions of the ShakeMap
            dep_shakemaps = (session.query(ShakeMap).filter(
                ShakeMap.shakemap_id == shakemap.shakemap_id).filter(
                    ShakeMap.status == 'new')).all()
            for dep_shakemap in dep_shakemaps:
                dep_shakemap.status = 'depricated'

            # assign relevant information to shakemap
            shakemap.map_status = shakemap_json['properties']['map-status']
            shakemap.region = shakemap_json['properties']['eventsource']
            shakemap.lat_max = shakemap_json['properties']['maximum-latitude']
            shakemap.lat_min = shakemap_json['properties']['minimum-latitude']
            shakemap.lon_max = shakemap_json['properties']['maximum-longitude']
            shakemap.lon_min = shakemap_json['properties']['minimum-longitude']
            shakemap.generation_timestamp = shakemap_json['properties'][
                'process-timestamp']
            shakemap.recieve_timestamp = time.time()

            # make a directory for the new event
            shakemap.directory_name = os.path.join(
                self.data_dir, shakemap.shakemap_id,
                shakemap.shakemap_id + '-' + str(shakemap.shakemap_version))
            if not os.path.exists(shakemap.directory_name):
                os.makedirs(shakemap.directory_name)

            # Try to download all preferred products
            for product_name in self.pref_products:
                # if we already have a good version of this product
                # just skip it
                if shakemap.has_products([product_name]):
                    continue

                existing_prod = (session.query(Product).filter(
                    Product.shakemap_id == shakemap.shakecast_id).filter(
                        Product.product_type == product_name)).all()

                if existing_prod:
                    product = existing_prod[0]
                else:
                    product = Product(shakemap=shakemap,
                                      product_type=product_name)

                try:
                    product.json = shakemap_json['contents']['download/%s' %
                                                             product_name]
                    product.url = product.json['url']

                    # download and allow partial products
                    product.str_ = url_opener.open(product.url)

                    # determine if we're writing binary or not
                    if product_name.lower().endswith(
                        ('.png', '.jpg', '.jpeg')):
                        mode = 'wb'
                    else:
                        mode = 'wt'

                    product.file_ = open(
                        '%s%s%s' %
                        (shakemap.directory_name, self.delim, product_name),
                        mode)
                    product.file_.write(product.str_)
                    product.file_.close()

                    product.error = None
                    product.status = 'downloaded'
                except Exception as e:
                    product.status = 'download failed'
                    product.error = '{}: {}'.format(type(e), e)
                    self.log += 'Failed to download: %s %s' % (eq_id,
                                                               product_name)

            # check for event whose id or one of its old ids matches the shakemap id
            if scenario is False:
                event = session.query(Event).filter(
                    Event.all_event_ids.contains(shakemap.shakemap_id)).all()
            else:
                event = session.query(Event).filter(
                    Event.event_id == shakemap.shakemap_id).all()

            if event:
                event = event[0]
                event.shakemaps.append(shakemap)

            if (scenario is False and shakemap.has_products(self.req_products)
                    and shakemap.status == 'downloading'):
                shakemap.status = 'new'
            elif scenario is True:
                shakemap.status = 'scenario'

            session.commit()

            new_shakemaps += [shakemap]
            shakemap_str += 'Wrote %s to disk.\n' % shakemap.shakemap_id

        self.log += shakemap_str
        Session.remove()
        return new_shakemaps, shakemap_str
Example #25
import os

from app import webapp
from orm import DB, Session
import models

webapp.config.update({
    'SQLALCHEMY_DATABASE_URI': os.environ['UQFINAL_DB_URI'],
    'SQLALCHEMY_TRACK_MODIFICATIONS': False,
    'SQLALCHEMY_POOL_SIZE': 0,
    'SQLALCHEMY_POOL_RECYCLE': 500,
})
DB.init_app(webapp)


if __name__ == "__main__":
    import sys
    args = sys.argv
    if len(args) > 1 and args[1] == "generatedb":
        with webapp.app_context():
            s = Session()
            models.ORMBase.metadata.create_all(s.connection())
            s.commit()
    else:
        webapp.run(port=8080)
Example #26
    def get_new_shakemaps(self, scenario=False):
        """
        Checks the json feed for new earthquakes
        """
        session = Session()
        url_opener = URLOpener()

        shakemap_str = ''
        new_shakemaps = []
        for eq_id in self.earthquakes.keys():
            eq = self.earthquakes[eq_id]

            if scenario is False:
                eq_url = eq['properties']['detail']
                try:
                    eq_str = url_opener.open(eq_url)
                except Exception:
                    self.log += 'Bad EQ URL: {0}'.format(eq_id)
                    continue
                try:
                    eq_info = json.loads(eq_str)
                except ValueError:
                    # bad or truncated JSON; skip this event
                    self.log += 'Bad EQ JSON: {0}'.format(eq_id)
                    continue
            else:
                eq_info = eq

            # check if the event has a shakemap
            if 'shakemap' not in eq_info['properties']['products'].keys():
                continue

            # pulls the first shakemap associated with the event
            shakemap = ShakeMap()

            if scenario is False:
                shakemap.shakemap_id = eq_id
            else:
                shakemap.shakemap_id = eq_id + '_scenario'
            shakemap.shakemap_version = eq_info['properties']['products'][
                'shakemap'][0]['properties']['version']

            # check if we already have the shakemap
            if shakemap.is_new() is False:
                shakemap = (session.query(ShakeMap).filter(
                    ShakeMap.shakemap_id == shakemap.shakemap_id).filter(
                        ShakeMap.shakemap_version ==
                        shakemap.shakemap_version).first())

            shakemap.json = eq_info['properties']['products']['shakemap'][0]

            # check if the shakemap has required products. If it does,
            # it is not a new map, and can be skipped
            if (shakemap.has_products(
                    self.req_products)) and scenario is False:
                continue

            # deprecate previous unprocessed versions of the ShakeMap
            dep_shakemaps = (session.query(ShakeMap).filter(
                ShakeMap.shakemap_id == shakemap.shakemap_id).filter(
                    ShakeMap.status == 'new'))
            for dep_shakemap in dep_shakemaps:
                dep_shakemap.status = 'depricated'

            # assign relevant information to shakemap
            shakemap.map_status = shakemap.json['properties']['map-status']
            shakemap.region = shakemap.json['properties']['eventsource']
            shakemap.lat_max = shakemap.json['properties']['maximum-latitude']
            shakemap.lat_min = shakemap.json['properties']['minimum-latitude']
            shakemap.lon_max = shakemap.json['properties']['maximum-longitude']
            shakemap.lon_min = shakemap.json['properties']['minimum-longitude']
            shakemap.generation_timestamp = shakemap.json['properties'][
                'process-timestamp']
            shakemap.recieve_timestamp = time.time()

            if scenario is False:
                shakemap.status = 'new'
            else:
                shakemap.status = 'scenario'

            # make a directory for the new event
            shakemap.directory_name = os.path.join(
                self.data_dir, shakemap.shakemap_id,
                shakemap.shakemap_id + '-' + str(shakemap.shakemap_version))
            if not os.path.exists(shakemap.directory_name):
                os.makedirs(shakemap.directory_name)

            # download products
            for product_name in self.req_products:
                product = Product(shakemap=shakemap, product_type=product_name)

                try:
                    product.json = shakemap.json['contents']['download/%s' %
                                                             product_name]
                    product.url = product.json['url']

                    # download and allow partial products
                    try:
                        product.str_ = url_opener.open(product.url)
                        eq['status'] = 'downloaded'
                    except httplib.IncompleteRead as e:
                        product.web = e.partial
                        eq['status'] = 'incomplete'

                    if product_name.lower().endswith(
                        ('.png', '.jpg', '.jpeg')):
                        mode = 'wb'
                    else:
                        mode = 'wt'
                    product.file_ = open(
                        '%s%s%s' %
                        (shakemap.directory_name, self.delim, product_name),
                        mode)
                    product.file_.write(product.str_)
                    product.file_.close()
                except Exception:
                    self.log += 'Failed to download: %s %s' % (eq_id,
                                                               product_name)

            # check for event whose id or one of its old ids matches the shakemap id
            if scenario is False:
                event = session.query(Event).filter(
                    Event.all_event_ids.contains(shakemap.shakemap_id)).all()
            else:
                event = session.query(Event).filter(
                    Event.event_id == shakemap.shakemap_id).all()

            if event:
                event = event[0]
                event.shakemaps.append(shakemap)

            session.commit()

            new_shakemaps += [shakemap]
            shakemap_str += 'Wrote %s to disk.\n' % shakemap.shakemap_id

        self.log += shakemap_str
        Session.remove()
        print(shakemap_str)
        return new_shakemaps, shakemap_str
Example #27
def checker(sql_query, orm_query):
    # Run the raw SQL and the ORM query, then compare results row-for-row.
    sql = []
    orm = []
    with engine.connect() as conn:
        result = conn.execute(sql_query)
        for x in result:
            sql.append(x)
    for x in orm_query:
        orm.append(x)
    print("ORM query:")
    print(orm)
    print("SQL query:")
    print(sql)

    assert sql == orm


session = Session()


def tester():

    # List, for every boat, the number of times it has been reserved,
    # excluding those boats that have never been reserved (list the id and the name).

    # group_by is required so the count is per boat rather than one grand total
    orm_2 = session.query(
        Boats.bid, Boats.bname,
        func.count(Reserves.sid)).filter(Boats.bid == Reserves.bid).\
        group_by(Boats.bid, Boats.bname)
    sql_2 = ("SELECT b.bid, b.bname, count(r.sid) AS TRserved "
             "FROM boats b JOIN reserves r ON b.bid = r.bid "
             "GROUP BY b.bid, b.bname;")
    checker(sql_2, orm_2)

    # Find the average age of sailors with a rating of 10.
Example #28
from datetime import date

from alchemyclass import User
from orm import Session, engine, Base
import requests
from time import sleep

Base.metadata.create_all(engine)

session = Session()

username = input('Enter a username to search: ')

url = f'https://api.github.com/users/{username}/repos'

web_page = requests.get(url)
web_page_json = web_page.json()

status = web_page.status_code

query = session.query(User).filter(User.username == username)

if status == 200:
    if query.first() is None:
        print('\n\nUser does not exist... Creating a new one!\n\n')
        sleep(2)
        for i in web_page_json:
            print(i['name'])
            insert_ = User(username, i['name'], date.today())
            session.add(insert_)
    print('\nAdding these repositories ^')
Example #29
def main():
    price_quote = base_read(PATHS['price_quote'])
    price_quote = convert_boolean(price_quote, ['bracket_pricing'])
    price_quote = convert_datetime(price_quote, ['quote_date'])

    base_load(price_quote, Supplier, COLUMNS_MAP['supplier'], session)
    base_load(price_quote, TubeAssembly, COLUMNS_MAP['tube_assembly'], session)
    base_load(price_quote, PriceQuote, COLUMNS_MAP['price_quote'], session)

    comp_boss = base_read(PATHS['component_boss'])
    comp_boss = convert_boolean(comp_boss,
                                ['groove', 'unique_feature', 'orientation'])

    base_load(comp_boss, ComponentType, COLUMNS_MAP['component_type'], session)
    base_load(comp_boss, ConnectionType, COLUMNS_MAP['connection_type'],
              session)
    base_load(comp_boss, Component, COLUMNS_MAP['component'], session)

    bill_of_materials = base_read(PATHS['material_bill'])
    bill_of_materials = normalize(bill_of_materials, 'tube_assembly_id',
                                  ['component_id', 'quantity'], 8)

    base_load(bill_of_materials, MaterialBill, COLUMNS_MAP['material_bill'],
              session)


if __name__ == "__main__":

    create_db()
    session = Session()
    main()