def score_message(hug_token, message_id: hug.types.number, score: hug.types.number):
    """Attach a score to a reply message owned by the authenticated token.

    Returns ``{"status": "ok"}`` on success.
    Raises HTTP 404 when the token or the message does not exist, and
    HTTP 409 when creating the score violates a database constraint
    (e.g. the message was already scored).
    """
    try:
        # Only reply messages (reply_to IS NOT NULL) may be scored.
        msg = Message.get(
            Message.token == Token.get(Token.token == hug_token),
            Message.id == message_id,
            Message.reply_to != None,  # noqa: E711 -- peewee IS NOT NULL expression
        )
        Score.create(message=msg, value=score)
        return {"status": "ok"}
    except peewee.DoesNotExist:
        # Base class of both Token.DoesNotExist and Message.DoesNotExist.
        # The original caught only Message.DoesNotExist, so an unknown
        # token raised Token.DoesNotExist and surfaced as a 500.
        raise HTTPError(HTTP_404)
    except peewee.IntegrityError:
        raise HTTPError(HTTP_409)
def scrape_piece(self, db_piece):
    """Scrape the piece page for *db_piece* and persist what was parsed.

    On download or parse failure the piece is flagged ``failed_scrape``
    and the method returns early. Otherwise new Score rows are staged,
    and the metadata JSON, raw HTML dump and ``scraped`` flag are
    committed on the piece record.
    """
    # Download and parse the piece page.
    try:
        page = PiecePage(db_piece.url)
        parsed_scores = page.parse_scores()
        metadata = page.parse_metadata()
    except (PageRequestFailure, PageParseFailure) as exc:
        if isinstance(exc, PageParseFailure):
            self._logger.warning("Failed to parse page at {}".format(
                db_piece.url))
        else:
            self._logger.warning(
                "Failed to download piece page at {}:".format(
                    db_piece.url))
        self._logger.warning(exc.original_exception)
        db_piece.failed_scrape = True
        commit_session(self._session)
        return

    # Stage Score rows for any download links not already recorded.
    new_scores = []
    for parsed in parsed_scores:
        for link in parsed.get('dl_links', []):
            if self._score_in_database(link):
                continue
            row = Score(piece=db_piece, composer=db_piece.composer)
            row.url = link
            # File extension taken from the last dot-separated segment
            # of the link.
            row.file_format = link.split('.')[-1]
            row.name = parsed.get('meta', {}).get('CPDL#', '')
            new_scores.append(row)
    self._session.add_all(new_scores)

    # Persist the scrape artefacts alongside the piece record.
    metadata['scores'] = parsed_scores
    db_piece.json_metadata = json.dumps(metadata)
    db_piece.html_dump = page.get_raw_html()
    db_piece.scraped = True
    commit_session(self._session)
    self._logger.info(
        "Successfully scraped {} scores from piece {}.".format(
            len(new_scores), db_piece.name))
def current_data_geojson(zone, score_key, date_key):
    """Serve the zone's block-group GeoJSON with scores merged in.

    Each feature gets a ``score`` property looked up by its GEOID; the
    result is returned as an attachment download.
    """
    raw_scores = Score.by_tag_type_with_date(zone, score_key, date_key)
    # Normalise keys to ints so they match the int(GEOID) lookups below.
    scores_by_geoid = {int(geoid): value for geoid, value in raw_scores.items()}

    with open(f'static/data/{zone}_bg.geojson') as bgfile:
        data = json.load(bgfile)

    for feature in data['features']:
        geoid = int(feature['properties']['GEOID'])
        feature['properties']['score'] = scores_by_geoid[geoid]

    return Response(
        json.dumps(data),
        mimetype='application/json',
        headers={'Content-Disposition': f'attachment;filename=ted_{zone}_{score_key}_{date_key}.geojson'})
def score(zone, score_key, date_key):
    """
    Data API call to retrieve score information.

    Format of score_key should be:
    measure_destination_function_date_period, where
        measure = the metric used (Access, Equity, Etc)
        destination = the destination (jobs, snap stores, etc)
        function = the measurement function (cumulative 45min, travel time)
        date = date key for the measure
        period = Morning peak (MP), etc.

    Raises ValueError if date_key is not an ISO ``YYYY-MM-DD`` date.
    """
    # Validate date_key up front: strptime raises ValueError on malformed
    # input. The parsed datetime itself is not needed (the original bound
    # it to an unused local `date`).
    dt.datetime.strptime(date_key, "%Y-%m-%d")
    scores = Score.by_tag_type_with_date(zone, score_key, date_key)
    return jsonify(scores)
def data_theme(theme, zone, score_key):
    """Return population-weighted average scores for theme '1'.

    NOTE(review): any theme other than '1' falls through and returns
    None (as in the original), which Flask will reject as a response --
    confirm whether additional themes are expected here.
    """
    if theme != '1':
        return None  # explicit form of the original's implicit fall-through
    averages = Score.weighted_average_all_pop_types(zone, score_key)
    return jsonify(averages)
def primal_download(zone, date_key):
    """Send the primal cumulative scores for *zone*/*date_key* as a CSV."""
    frame = Score.by_tag_type_with_date_primal(zone, date_key)
    records = frame.to_dict(orient='records')
    return send_csv(records,
                    f"ted_{zone}_{date_key}_cumulative.csv",
                    frame.columns)
def all_data_csv(zone, date_key):
    """Send every score for *zone*/*date_key* as a CSV download."""
    frame = Score.by_tag_type_with_date_all(zone, date_key)
    records = frame.to_dict(orient='records')
    return send_csv(records,
                    f"ted_{zone}_{date_key}_all.csv",
                    frame.columns)
def current_data_csv(zone, score_key, date_key):
    """Send per-block-group scores for one score_key/date_key as a CSV."""
    scores = Score.by_tag_type_with_date(zone, score_key, date_key)
    rows = [{'block_group': bg, 'score': value}
            for bg, value in scores.items()]
    return send_csv(rows,
                    f"ted_{zone}_{score_key}_{date_key}.csv",
                    ['block_group', 'score'])