def should_fix_be_attempted(replay_id, error, extra=None):
    """ Checks whether we've hit the limit for auto-fix attempts for the given
    replay_id and error.  If not, return True and log this new fix attempt in
    the DB.  If we have hit the limit, return False and log a message to Sentry.

    :param replay_id: ID of the replay being fixed
    :param error: Error type
    :param extra: Any extra data (will be JSON encoded)
    :return: Boolean
    """
    fix_attempts = ReplayAutoFix.query.filter(
        ReplayAutoFix.replay_id == replay_id,
        ReplayAutoFix.error == error
    ).all()

    if len(fix_attempts) <= app.config['MAX_REPLAY_FIX_ATTEMPTS']:
        fix_attempt = ReplayAutoFix(replay_id, error, extra)
        db.session.add(fix_attempt)
        db.session.commit()
        return True
    else:
        sentry.captureMessage("Replay {} with error {} has exceeded auto-fix attempts.".format(
            replay_id,
            error
        ), extra=extra)
        return False

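# Usage sketch (not from the original source): one way a replay-processing task might
# consult should_fix_be_attempted before retrying. `reprocess_replay` and the
# 'DOWNLOAD_TIMEOUT' error label are hypothetical names used only for illustration.
def handle_download_timeout(replay_id):
    from datetime import datetime
    extra = {'attempted_at': datetime.utcnow().isoformat()}
    if should_fix_be_attempted(replay_id, 'DOWNLOAD_TIMEOUT', extra=extra):
        # Still within the attempt budget: queue another fix attempt.
        reprocess_replay(replay_id)
    # Otherwise do nothing; should_fix_be_attempted has already reported to Sentry.
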
def get_twitter_for_acc(account):
    """Return an authenticated Twitter client for the account, deleting revoked tokens."""
    consumer_key = app.config['TWITTER_CONSUMER_KEY']
    consumer_secret = app.config['TWITTER_CONSUMER_SECRET']

    tokens = (OAuthToken.query.with_parent(account)
              .order_by(db.desc(OAuthToken.created_at)).all())
    for token in tokens:
        t = Twitter(auth=OAuth(token.token, token.token_secret,
                               consumer_key, consumer_secret))
        try:
            t.account.verify_credentials()
            return t
        except TwitterHTTPError as e:
            if e.e.code == 401:
                # Token revoked: report it, remove the token, and try the next one.
                if sentry:
                    sentry.captureMessage('Twitter auth revoked', extra=locals())
                db.session.delete(token)
                db.session.commit()
            else:
                raise TemporaryError(e)
        except URLError as e:
            raise TemporaryError(e)
    raise TemporaryError("No access to account {}".format(account))

def get_account_info(client):
    """Get user's balance info."""
    assets = []
    total = 0

    response = client.query_private('Balance', {})
    balance = response.get('result')
    if not balance:
        from app import sentry
        sentry.captureMessage(response.get('error'))
        return None, None

    # TODO:
    # Collect all pairs used in balance and query them once together.
    pair_list = [asset + 'ZUSD' for asset in balance.keys() if asset[0] == 'X']
    response = client.query_public('Ticker', {'pair': ','.join(pair_list)})
    pair_price = response.get('result')
    print(response)

    for asset in balance.keys():
        if asset[0] == 'X':
            pair_name = asset + 'ZUSD'
            new_asset = Asset(asset,
                              float(balance.get(asset)),
                              float(pair_price.get(pair_name).get('c')[0]))
        else:
            new_asset = Asset(asset,
                              amount=float(balance.get(asset)),
                              total=float(balance.get(asset)))
        assets.append(new_asset.to_dict())
        total += new_asset.total

    return assets, total

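# Usage sketch (an assumption, not from the original source): get_account_info expects a
# Kraken client exposing query_private/query_public, such as a krakenex.API instance.
# The 'kraken.key' path is a hypothetical key file used only for illustration.
def print_balance_summary():
    import krakenex
    client = krakenex.API()
    client.load_key('kraken.key')

    assets, total = get_account_info(client)
    if assets is None:
        print('Balance query failed; the error was reported to Sentry.')
        return

    for asset in assets:
        print(asset)
    print('Total (USD): {:.2f}'.format(total))
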
def index():
    user = {'nickname': 'tatumn'}
    posts = [
        {
            'author': {'nickname': 'John'},
            'body': 'Beautiful day in Portland!'
        },
        {
            'author': {'nickname': 'Susan'},
            'body': 'The Avengers movie was so cool!'
        }
    ]

    # Deliberately raise and capture an exception to exercise the Sentry integration.
    try:
        1 / 0
    except Exception:
        sentry.captureException()
    sentry.captureMessage('hello, world!')

    form = NameForm()
    if form.validate_on_submit():
        old_name = session.get('name')
        if old_name is not None and old_name != form.name.data:
            flash('Looks like you have changed your name!')
        session['name'] = form.name.data
        return redirect(url_for('home.index'))

    return render_template('html/index.html',
                           title='home',
                           user=user,
                           current_time=datetime.utcnow(),
                           form=form,
                           name=session.get('name'),
                           posts=posts)

def fetch_leagues_from_webapi(cls):
    """ Fetch a list of leagues from the Dota 2 WebAPI.

    Uses steamodd to interface with the WebAPI.  Falls back to data stored on the
    file-system in case of a HTTPError when interfacing with the WebAPI.

    Returns:
        An array of League objects.
    """
    try:
        res = steam.api.interface("IDOTA2Match_570").GetLeagueListing(
            language="en_US").get("result")

        # Filter out extra entries with the same league id.
        leagues_by_id = {}
        for _league in res.get("leagues"):
            leagues_by_id[int(_league.get("leagueid"))] = _league

        return leagues_by_id.values()

    except steam.api.HTTPError:
        sentry.captureMessage('League.get_all returned with HTTPError',
                              exc_info=sys.exc_info())

        # Try to get data from existing cache entry
        data = fs_cache.cache.get('leagues', ignore_expiry=True)

        # Return data if we have any, else return an empty list
        return data or list()

def fetch_heroes(cls):
    """ Fetch a list of heroes from the Dota 2 WebAPI.

    Uses steamodd to interface with the WebAPI.  Falls back to data stored on the
    file-system in case of a HTTPError when interfacing with the WebAPI.

    Returns:
        An array of Hero objects.
    """
    try:
        res = steam.api.interface("IEconDOTA2_570").GetHeroes(
            language="en_US").get("result")

        return list(
            cls(
                hero.get("id"),
                hero.get("name"),
                hero.get("localized_name")
            ) for hero in res.get("heroes"))

    except steam.api.HTTPError:
        sentry.captureMessage('Hero.get_all returned with HTTPError',
                              exc_info=sys.exc_info())

        # Try to get data from existing cache entry
        data = fs_cache.cache.get('heroes', ignore_expiry=True)

        # Return data if we have any, else return an empty list()
        return data or list()

def _leagues_data():
    _leagues = League.get_all()
    if len(_leagues) == 0:
        sentry.captureMessage('Leagues.get_all() returned an empty list.')
        return []

    # Count archived replays per league in a single query.
    replay_counts = {x.league_id: x.count for x in db.engine.execute(
        text("""
            SELECT r.league_id as league_id, count(*) as count
            FROM {replay_table} r
            WHERE r.league_id in ({league_id_csv})
            AND r.state = "ARCHIVED"
            GROUP BY r.league_id
            """.format(
                replay_table=Replay.__tablename__,
                league_id_csv=",".join(str(x.id) for x in _leagues))
        )
    )}

    leagues_with_replays = []
    for _league in _leagues:
        if replay_counts.get(_league.id, 0) > 0:
            _league.count = replay_counts.get(_league.id)
            leagues_with_replays.append(_league)

    # Sort by archived count
    return sorted(leagues_with_replays, key=lambda r: r.count, reverse=True)

def fetch_leagues(cls):
    """ Fetch a list of leagues from the Dota 2 WebAPI.

    Uses steamodd to interface with the WebAPI.  Falls back to data stored on the
    file-system in case of a HTTPError when interfacing with the WebAPI.

    Returns:
        An array of League objects.
    """
    try:
        res = steam.api.interface("IDOTA2Match_570").GetLeagueListing(
            language="en_US").get("result")

        return list(
            cls(
                _league.get("leagueid"),
                _league.get("name"),
                _league.get("description"),
                _league.get("tournament_url"),
                _league.get("itemdef")
            ) for _league in res.get("leagues"))

    except steam.api.HTTPError:
        sentry.captureMessage('League.get_all returned with HTTPError',
                              exc_info=sys.exc_info())

        # Try to get data from existing cache entry
        data = fs_cache.cache.get('leagues', ignore_expiry=True)

        # Return data if we have any, else return an empty list
        return data or list()

def get_api_for_acc(account):
    """Return an authenticated Mastodon client for the account, deleting revoked tokens."""
    app = MastodonApp.query.get(account.mastodon_instance)
    for token in account.tokens:
        api = Mastodon(
            app.client_id,
            client_secret=app.client_secret,
            api_base_url='{}://{}'.format(app.protocol, app.instance),
            access_token=token.token,
            ratelimit_method='throw',
            session=make_session(),
        )
        try:
            # api.verify_credentials() doesn't error even if the token is revoked
            # https://github.com/tootsuite/mastodon/issues/4637
            # so we have to do this:
            api.timeline()
            if api.ratelimit_remaining / api.ratelimit_limit < 1 / 4:
                raise TemporaryError("Rate limit too low")
            return api
        except MastodonUnauthorizedError as e:
            if sentry:
                sentry.captureMessage('Mastodon auth revoked or incorrect',
                                      extra=locals())
            db.session.delete(token)
            db.session.commit()
            continue
        except MastodonAPIError as e:
            raise TemporaryError(e)
        except (MastodonNetworkError, MastodonRatelimitError) as e:
            raise TemporaryError(e)
    raise TemporaryError('No access to account {}'.format(account))

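# Usage sketch (not from the original source): both get_twitter_for_acc and
# get_api_for_acc raise TemporaryError on transient failures, so a caller can retry
# with a delay. The attempt count and sleep interval below are arbitrary illustrative values.
def get_api_with_retries(account, attempts=3, delay=60):
    import time
    for attempt in range(attempts):
        try:
            return get_api_for_acc(account)
        except TemporaryError:
            if attempt + 1 == attempts:
                raise
            time.sleep(delay)
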
def fetch_schema():
    """ Fetches the Dota 2 item schema

    Uses steamodd to interface with the WebAPI.  Falls back to data stored on the
    file-system in case of a HTTPError when interfacing with the WebAPI.

    Returns:
        A steam.items.schema object.  None if there was a HTTPError fetching the
        data and we did not have a file-system fallback.
    """
    try:
        schema = steam.items.schema(570)
        schema.client_url  # Touch things so steamodd caching actually loads data
        return schema

    except steam.api.HTTPError:
        sentry.captureMessage('Schema.fetch_schema returned with HTTPError',
                              exc_info=sys.exc_info())

        # Try to get data from existing cache entry
        data = fs_cache.cache.get('schema', ignore_expiry=True)

        # Return data if we have any, else return None
        return data or None

def log_notify(msg):
    """Print the message when debugging; otherwise report it to Sentry."""
    from app import app
    if app.config['DEBUG']:
        print datetime.utcnow(), msg
    else:
        from app import sentry
        sentry.captureMessage(msg)

def fetch_regions(cls):
    """ Fetch a list of regions via the game's regions.txt

    Fetches regions.txt as JSON via Dotabuff's d2vpk repository, and parses it
    for data.  Falls back to data stored on the file-system in case of a
    HTTPError.

    Returns:
        An array of Region objects.
    """
    try:
        req = requests.get(REGION_DATA_URL)

        # Raise HTTPError if we don't get HTTP OK
        if req.status_code != requests.codes.ok:
            raise requests.HTTPError("Response not HTTP OK")

        # Fetch relevant pieces of data from JSON data
        input_regions = req.json()['regions']
        output_regions = []

        # Iterate through regions, create an instance of this class for each.
        for key, region in input_regions.items():
            # Skip unspecified - we don't need it
            if key == "unspecified":
                continue

            output_regions.append(
                cls(
                    _id=int(region.get('region')),
                    matchgroup=int(region.get('matchgroup')) if region.get('matchgroup') else None,
                    latitude=float(region.get('latitude')) if region.get('latitude') else None,
                    longitude=float(region.get('longitude')) if region.get('longitude') else None,
                    display_name_token=region.get('display_name')[1:] if region.get('display_name')[0] == '#' else region.get('display_name'),
                    proxy_allow=region.get('proxy_allow'),
                    division=region.get('division'),
                    alert_at_capacity=bool(region.get('alert_at_capacity', True)),
                    ip_range=region.get('ip_range', []),
                    ip_relay_peers=region.get('ip_relay_peers', []),
                    clusters=map(int, region.get('clusters', []))
                )
            )

        return output_regions

    except (requests.HTTPError, KeyError):
        sentry.captureMessage('Region.fetch_regions failed',
                              exc_info=sys.exc_info())

        # Try to get data from existing cache entry
        data = fs_cache.cache.get(cls._CACHE_KEY, ignore_expiry=True)

        # Return data if we have any, else return an empty list()
        return data or list()

def test_query():
    try:
        rows = TestTable.query.all()
        return rows
    except DatabaseError:
        sentry.captureException()
        return None
    except Exception as e:
        sentry.captureMessage(e)
        return None

def get_daily_balance(self):
    """Get balance every day."""
    from app import sentry

    db = self.database_service.get_db()
    db.connect()
    try:
        assets, total = get_account_info()
        Balance.create(total=total)
    except Exception as e:
        sentry.captureMessage(e)
    finally:
        db.close()

def fetch_heroes(cls):
    """ Fetch a list of heroes via the game's npc_heroes.txt

    Fetches npc_heroes.txt as JSON via Dotabuff's d2vpk repository, and parses
    it for data.  Falls back to data stored on the file-system in case of a
    HTTPError or KeyError.

    Only retrieves ID and token for now, but there's a ton more data available
    should we ever need it.

    Returns:
        An array of Hero objects.
    """
    try:
        req = requests.get(HERO_DATA_URL)

        # Raise HTTPError if we don't get HTTP OK
        if req.status_code != requests.codes.ok:
            raise requests.HTTPError("Response not HTTP OK")

        # Fetch relevant pieces of data from JSON data
        input_heroes = req.json()['DOTAHeroes']
        output_heroes = []

        # Iterate through heroes, create an instance of this class for each.
        for key, hero in input_heroes.items():
            print(key)
            # Skip these keys - they're not hero definitions
            if key in ["Version", "npc_dota_hero_base"]:
                continue

            output_heroes.append(
                cls(
                    _id=int(hero.get('HeroID')),
                    token=key
                )
            )

        return output_heroes

    except (requests.HTTPError, KeyError):
        sentry.captureMessage('Hero.fetch_heroes failed',
                              exc_info=sys.exc_info())

        # Try to get data from existing cache entry
        data = fs_cache.cache.get(cls._CACHE_KEY, ignore_expiry=True)

        # Return data if we have any, else return an empty list()
        return data or list()

def update_data(cls):
    """ Fetch a list of heroes via the game's npc_heroes.txt and store the data
    in the database.

    Fetches npc_heroes.txt as JSON via Dotabuff's d2vpk repository, and parses
    it for data.

    Only retrieves ID and token for now, but there's a ton more data available
    should we ever need it.

    Returns:
        False if fetching the hero data failed, otherwise the result of
        committing the updated rows.
    """
    hero_data = None
    try:
        req = requests.get(HERO_DATA_URL)

        # Raise HTTPError if we don't get HTTP OK
        if req.status_code != requests.codes.ok:
            raise requests.HTTPError("Response not HTTP OK")

        # Fetch relevant pieces of data from JSON data
        hero_data = req.json()['DOTAHeroes']

    # If fetch failed, yell things
    except (requests.HTTPError, KeyError):
        sentry.captureMessage('Hero.update_data failed',
                              exc_info=sys.exc_info())
        return False

    # Iterate through heroes, updating or creating a database row where appropriate
    for key, hero in hero_data.items():
        # Skip these keys - they're not hero definitions
        if key in ["Version", "npc_dota_hero_base"]:
            continue

        hero_id = int(hero.get('HeroID'))

        _hero = cls.query.filter(cls.id == hero_id).first()
        if not _hero:
            _hero = cls(hero_id, key)

        _hero.id = hero_id
        _hero.token = key
        _hero.name = cls.token_to_name(key)

        db.session.add(_hero)

    return db.session.commit()

def handle_user_data(guid, user_type, email, first_name=None, middle_initial=None,
                     last_name=None, terms_of_use=None, email_validation_flag=None,
                     token=None, next_url=None):
    """
    Interpret the result of processing the specified user data and act accordingly:
    - If a redirect url is returned, redirect to that url.
    - If a user is returned, log that user in and return a redirect to the specified
      next_url, the home page url, or to the home page if next_url is provided and
      is unsafe.
    """
    redirect_required, user_or_url = _process_user_data(
        guid,
        user_type,
        email,
        first_name,
        middle_initial,
        last_name,
        terms_of_use,
        email_validation_flag
    )

    if redirect_required:
        return jsonify({'next': user_or_url})
    else:
        login_user(user_or_url)
        _session_regenerate_persist_token()

        if not is_safe_url(next_url):
            sentry.client.context.merge({'user': {
                'guid': user_or_url.guid,
                'auth_user_type': user_or_url.auth_user_type
            }})
            sentry.captureMessage(error_msg.UNSAFE_NEXT_URL)
            sentry.client.context.clear()
            return jsonify({'next': url_for('main.index', fresh_login=True,
                                            _external=True, _scheme='https')})

        return jsonify({'next': next_url or url_for('main.index', fresh_login=True,
                                                    _external=True, _scheme='https')})

def fetch_heroes(cls):
    """ Fetch a list of heroes via the game's npc_heroes.txt

    Fetches npc_heroes.txt as JSON via Dotabuff's d2vpk repository, and parses
    it for data.  Falls back to data stored on the file-system in case of a
    HTTPError or KeyError.

    Only retrieves ID and token for now, but there's a ton more data available
    should we ever need it.

    Returns:
        An array of Hero objects.
    """
    try:
        req = requests.get(HERO_DATA_URL)

        # Raise HTTPError if we don't get HTTP OK
        if req.status_code != requests.codes.ok:
            raise requests.HTTPError("Response not HTTP OK")

        # Fetch relevant pieces of data from JSON data
        input_heroes = req.json()['DOTAHeroes']
        output_heroes = []

        # Iterate through heroes, create an instance of this class for each.
        for key, hero in input_heroes.items():
            # Skip these keys - they're not hero definitions
            if key in ["Version", "npc_dota_hero_base"]:
                continue

            output_heroes.append(
                cls(_id=int(hero.get('HeroID')), _token=key))

        return output_heroes

    except (requests.HTTPError, KeyError):
        sentry.captureMessage('Hero.fetch_heroes failed',
                              exc_info=sys.exc_info())

        # Try to get data from existing cache entry
        data = fs_cache.cache.get(cls._CACHE_KEY, ignore_expiry=True)

        # Return data if we have any, else return an empty list()
        return data or list()

def fetch_tokens(cls, language):
    """ Fetch a localization file from the game's files.

    Fetches dota_LANGUAGE.txt as JSON via Dotabuff's d2vpk repository, and
    parses it for data.

    Returns:
        A dict of localization tokens.
    """
    try:
        req = requests.get(LOCALIZATION_DATA_URL.format(language))

        # Raise HTTPError if we don't get HTTP OK
        if req.status_code != requests.codes.ok:
            raise requests.HTTPError("Response not HTTP OK")

        # Fetch relevant pieces of data from JSON data
        input_data = req.json()['lang']

        return input_data.get('Tokens')

    except (requests.HTTPError, KeyError):
        sentry.captureMessage('Localization.fetch_tokens failed',
                              exc_info=sys.exc_info())
        return []

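# Usage sketch (an assumption, not from the original source): combining the token dict
# returned by fetch_tokens with fetch_regions to resolve human-readable region names.
# The class names Localization/Region, the 'english' language argument, and the attribute
# names on the region instances mirror the snippets above but are assumptions about the
# surrounding models.
def region_display_names(language='english'):
    tokens = Localization.fetch_tokens(language) or {}
    names = {}
    for region in Region.fetch_regions():
        # Fall back to the raw token if no localized string exists.
        names[region._id] = tokens.get(region.display_name_token,
                                       region.display_name_token)
    return names
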
def fetch_items(cls):
    """ Fetch a list of items from a non-public JSON feed.

    Falls back to data stored on the file-system in case of any problems fetching
    the data.

    Returns:
        A list of Item objects.  An empty list if there were any errors fetching
        the data and we did not have a file-system fallback.
    """
    try:
        request = requests.get("http://www.dota2.com/jsfeed/itemdata")
        if request.status_code == requests.codes.ok:
            try:
                data = request.json()["itemdata"]
                return list(
                    cls(
                        v.get('id'),
                        k,
                        v.get('dname'),
                        v.get('img'),
                        v.get('qual'),
                        v.get('cost'),
                        v.get('desc'),
                        v.get('notes'),
                        v.get('attrib'),
                        v.get('mc'),
                        v.get('cd'),
                        v.get('lore'),
                        v.get('components'),
                        v.get('created')
                    ) for k, v in data.iteritems()
                )
            except (KeyError, ValueError) as e:
                if current_app.debug:
                    raise e
                current_app.logger.warning('Item.get_all threw exception',
                                           exc_info=True,
                                           extra={
                                               'extra': json.dumps({
                                                   'url': request.url,
                                                   'text': request.text,
                                                   'status_code': request.status_code,
                                               })
                                           })
        else:
            current_app.logger.warning('Item.get_all returned with non-OK status',
                                       extra={
                                           'extra': json.dumps({
                                               'url': request.url,
                                               'text': request.text,
                                               'status_code': request.status_code,
                                           })
                                       })
    except requests.exceptions.RequestException:
        sentry.captureMessage('Item.get_all returned with RequestException',
                              exc_info=sys.exc_info())

        # Try to get data from existing cache entry
        data = fs_cache.cache.get('items', ignore_expiry=True)

        # Return data if we have any, else return an empty list
        return data or list()

def update_masteries(user_id, correct_words, wrong_words):
    # Populate this updates array to send to update_masteries
    updates = []

    # Loop through all words that were in questions that were answered correctly
    for word in correct_words:
        existing_update_index = None

        # Search for an update if one already exists with this word
        for (idx, update) in enumerate(updates):
            if update["chinese"] == word:
                existing_update_index = idx

        if existing_update_index is not None:
            # Update the existing update if there is one
            updates[existing_update_index]["change"] += 1
        else:
            # Create a new update for this word otherwise
            updates.append({
                "change": 1,
                "chinese": word
            })

    # Loop through all words that were in questions that were answered incorrectly
    for word in wrong_words:
        existing_update_index = None

        # Search for an update if one already exists with this word
        for (idx, update) in enumerate(updates):
            if update["chinese"] == word:
                existing_update_index = idx

        if existing_update_index is not None:
            # Update the existing update if there is one
            updates[existing_update_index]["change"] -= 1
        else:
            # Create a new update for this word otherwise
            updates.append({
                "change": -1,
                "chinese": word
            })

    # Get all words from the updates and convert them to entry ids
    words = [update["chinese"] for update in updates]
    entries = Entry.query.filter(Entry.chinese.in_(words)).all()
    entry_ids = []

    # Add entry ids to the updates with matching Chinese text
    for entry in entries:
        update_index = None
        for (idx, datum) in enumerate(updates):
            if datum["chinese"] == entry.chinese:
                update_index = idx

        entry_ids.append(entry.id)
        updates[update_index]["entry_id"] = entry.id

    # Log any masteries that couldn't find entries to Sentry
    for update in updates:
        if "entry_id" not in update:
            if sentry is not None:
                sentry.captureMessage(
                    "Entry could not be found for mastery update: %s" % update["chinese"],
                    extra={"update": update})
            else:
                print("Entry could not be found for mastery update: %s" % update["chinese"])

    # Clear all updates that don't have associated entries
    updates = list(filter(lambda x: "entry_id" in x, updates))

    # Find this user's masteries by the entry ids list
    masteries = Mastery.query \
        .filter_by(user_id=user_id) \
        .filter(Mastery.entry_id.in_(entry_ids))

    # Update the mastery value for all of the masteries that already exist
    for mastery in masteries:
        update = None
        update_index = 0

        # Find the update that matches this mastery
        for (idx, datum) in enumerate(updates):
            if datum["entry_id"] == mastery.entry_id:
                update = datum
                update_index = idx

        new_mastery_value = mastery.mastery + update["change"]

        # Make sure the mastery value is 0 ≤ n ≤ 10
        if new_mastery_value < 0:
            new_mastery_value = 0
        elif new_mastery_value > 10:
            new_mastery_value = 10

        mastery.mastery = new_mastery_value

        # Mark this update as done so we know which ones don't have associated masteries
        updates[update_index]["done"] = True

    new_masteries = []

    # Create new masteries for the updates that weren't taken care of earlier
    for update in updates:
        if "done" not in update:
            mastery = Mastery(user_id, update["entry_id"], max(0, update["change"]))
            new_masteries.append(mastery)

    # Add new masteries to the database
    db.session.add_all(new_masteries)

    # Commit new masteries and updates to existing masteries
    db.session.commit()

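# Usage sketch (not from the original source): how a quiz endpoint might feed graded
# answers into update_masteries. `current_user` and the shape of `graded_answers` are
# hypothetical; the only requirement is that the words match Entry.chinese values.
def record_quiz_results(current_user, graded_answers):
    correct_words = [a['chinese'] for a in graded_answers if a['correct']]
    wrong_words = [a['chinese'] for a in graded_answers if not a['correct']]
    update_masteries(current_user.id, correct_words, wrong_words)
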