Beispiel #1
0
    def _scrape(self, **kwargs):
        """Scrape case and death counts (total and by race) from two Tableau dashboards.

        Extracts the report date, lab-confirmed + probable case totals,
        lab-confirmed deaths, and Black / unknown-race breakdowns, then
        returns a one-element list produced by ``self._make_series``.
        """
        # Visit both dashboards in one session.  Waiting on the expected
        # number of <canvas> elements ensures each page's Tableau data request
        # has fired before we capture it; request history is cleared between
        # pages so 'deaths' matches only the second page.
        runner = WebdriverRunner()
        results = runner.run(WebdriverSteps().go_to_url(
            self.CASES_URL).wait_for_number_of_elements(
                (By.XPATH, '//canvas'), 58).find_request(
                    'cases', find_by=tableau.find_tableau_request
                ).clear_request_history().go_to_url(
                    self.DEATHS_URL).wait_for_number_of_elements(
                        (By.XPATH, '//canvas'),
                        29).find_request('deaths',
                                         find_by=tableau.find_tableau_request))

        parser = tableau.TableauParser(request=results.requests['cases'])

        # Report date is published like 'Monday, January 4, 2021'.
        raw_date_str = pydash.head(
            parser.extract_data_from_key('cases')['ATTR(Date Updated)'])
        date = datetime.strptime(raw_date_str, '%A, %B %d, %Y').date()

        # Total cases = lab-confirmed + probable.
        confirmed_cases = pydash.head(
            parser.extract_data_from_key('cases')
            ['SUM(# Lab Confirmed Cases)'])
        probable_cases = pydash.head(
            parser.extract_data_from_key('probable cases')['SUM(# probable)'])
        cases = confirmed_cases + probable_cases
        # Race/ethnicity counts, indexed by the 'sub-category' label.
        cases_df = pd.DataFrame.from_dict(
            parser.extract_data_from_key('raceth')).set_index('sub-category')
        aa_cases = cases_df.loc['Black']['SUM(count)']
        # Exclude unknown-race records from the percentage denominator.
        known_race_cases = cases - cases_df.loc['unknown']['SUM(count)']

        parser = tableau.TableauParser(request=results.requests['deaths'])
        deaths = pydash.head(
            parser.extract_data_from_key('death (2)')
            ['SUM(# lab confirmed deaths)'])
        deaths_df = pd.DataFrame.from_dict(
            parser.extract_data_from_key('raceth (death)')).set_index(
                'sub-category')
        # The deaths dashboard exposes per-race fractions, not counts;
        # convert to approximate counts by scaling against the death total.
        deaths_df = deaths_df.assign(Count=[
            round(v * deaths) for v in deaths_df['SUM(% of deaths)'].values
        ])
        aa_deaths = deaths_df.loc['Black']['Count']
        known_race_deaths = deaths - deaths_df.loc['unknown']['Count']

        pct_aa_cases = misc.to_percentage(aa_cases, known_race_cases)
        pct_aa_deaths = misc.to_percentage(aa_deaths, known_race_deaths)

        return [
            self._make_series(
                date=date,
                cases=cases,
                deaths=deaths,
                aa_cases=aa_cases,
                aa_deaths=aa_deaths,
                pct_aa_cases=pct_aa_cases,
                pct_aa_deaths=pct_aa_deaths,
                pct_includes_unknown_race=False,
                pct_includes_hispanic_black=False,
                known_race_cases=known_race_cases,
                known_race_deaths=known_race_deaths,
            )
        ]
    def _scrape(self, **kwargs):
        """Scrape totals and race breakdowns from two Tableau dashboards.

        Race breakdowns are published as fractions, so Black and unknown-race
        counts are reconstructed by scaling against the totals.  Returns a
        one-element list from ``self._make_series``.
        """
        # Wait for each dashboard's <canvas> elements so the Tableau data
        # requests have fired before capturing them; history is cleared
        # between pages so 'deaths' matches only the second page.
        runner = WebdriverRunner()
        results = runner.run(WebdriverSteps().go_to_url(
            self.CASES_URL).wait_for_number_of_elements(
                (By.XPATH, '//canvas'), 38).find_request(
                    'cases', find_by=tableau.find_tableau_request
                ).clear_request_history().go_to_url(
                    self.DEATHS_URL).wait_for_number_of_elements(
                        (By.XPATH, '//canvas'),
                        20).find_request('deaths',
                                         find_by=tableau.find_tableau_request))

        parser = tableau.TableauParser(request=results.requests['cases'])

        # The dashboard footer carries the as-of date as mm-dd-yy.
        date_str = pydash.head(
            parser.extract_data_from_key('Footer')['AGG(Today)'])
        date = datetime.strptime(date_str, '%m-%d-%y').date()

        cases = pydash.head(
            parser.extract_data_from_key('Total Cases')['AGG(Total Cases)'])
        # NOTE: 'Total  Deaths' (double space) matches the worksheet name
        # exactly — do not "fix" the spacing.
        deaths = pydash.head(
            parser.extract_data_from_key('Total  Deaths')
            ['SUM(Count Of Deaths)'])
        # 'Race Breakdown ' has a trailing space in the worksheet name.
        cases_pct_df = pd.DataFrame.from_dict(
            parser.extract_data_from_key('Race Breakdown ')).set_index('Race')
        # Convert per-race fractions to approximate counts.
        cases_df = cases_pct_df.assign(Count=[
            round(v * cases) for v in cases_pct_df['CNTD(Caseid 1)'].values
        ])
        aa_cases = cases_df.loc['Black']['Count']
        known_race_cases = cases - cases_df.loc['Unknown']['Count']

        parser = tableau.TableauParser(request=results.requests['deaths'])
        deaths_pct_df = pd.DataFrame.from_dict(
            parser.extract_data_from_key('Bar | Race')).set_index('Race')
        # Deaths by race are fractions as well; scale by the death total.
        deaths_df = deaths_pct_df.assign(Count=[
            round(v * deaths) for v in deaths_pct_df['SUM(Death Count)'].values
        ])
        aa_deaths = deaths_df.loc['Black']['Count']
        known_race_deaths = deaths - deaths_df.loc['Unknown']['Count']

        pct_aa_cases = misc.to_percentage(aa_cases, known_race_cases)
        pct_aa_deaths = misc.to_percentage(aa_deaths, known_race_deaths)

        return [
            self._make_series(date=date,
                              cases=cases,
                              deaths=deaths,
                              aa_cases=aa_cases,
                              aa_deaths=aa_deaths,
                              pct_aa_cases=pct_aa_cases,
                              pct_aa_deaths=pct_aa_deaths,
                              pct_includes_unknown_race=False,
                              pct_includes_hispanic_black=True,
                              known_race_cases=known_race_cases,
                              known_race_deaths=known_race_deaths)
        ]
    def _scrape(self, **kwargs):
        """Scrape case/death totals and race breakdowns from two Tableau
        dashboards and return ``[self._make_series(...)]``.
        """
        # Capture the Tableau data request for each page; request history is
        # cleared between pages so 'deaths' matches only the second one.
        runner = WebdriverRunner()
        results = runner.run(WebdriverSteps().go_to_url(
            self.CASES_URL).find_request('cases',
                                         find_by=tableau.find_tableau_request).
                             clear_request_history().go_to_url(
                                 self.DEATHS_URL).find_request(
                                     'deaths',
                                     find_by=tableau.find_tableau_request))

        parser = tableau.TableauParser(request=results.requests['cases'])
        # Report date is published as mm/dd/yyyy.
        raw_date_str = pydash.head(
            parser.extract_data_from_key('cases')['ATTR(dateupdated)'])
        date = datetime.strptime(raw_date_str, '%m/%d/%Y').date()

        cases = pydash.head(
            parser.extract_data_from_key('cases')
            ['SUM(Laboratory Confirmed Cases)'])
        # Race/ethnicity counts, indexed by the 'subcategory' label.
        cases_df = pd.DataFrame.from_dict(
            parser.extract_data_from_key('raceth')).set_index('subcategory')
        aa_cases = cases_df.loc['Black']['SUM(count)']
        # Exclude unknown-race records from the percentage denominator.
        known_race_cases = cases - cases_df.loc['Unknown']['SUM(count)']

        parser = tableau.TableauParser(request=results.requests['deaths'])
        deaths = pydash.head(
            parser.extract_data_from_key('death (2)')['SUM(Deaths)'])
        deaths_df = pd.DataFrame.from_dict(
            parser.extract_data_from_key('raceth (death)')).set_index(
                'subcategory')
        aa_deaths = deaths_df.loc['Black']['SUM(count)']
        known_race_deaths = deaths - deaths_df.loc['Unknown']['SUM(count)']

        pct_aa_cases = misc.to_percentage(aa_cases, known_race_cases)
        pct_aa_deaths = misc.to_percentage(aa_deaths, known_race_deaths)

        return [
            self._make_series(
                date=date,
                cases=cases,
                deaths=deaths,
                aa_cases=aa_cases,
                aa_deaths=aa_deaths,
                pct_aa_cases=pct_aa_cases,
                pct_aa_deaths=pct_aa_deaths,
                pct_includes_unknown_race=False,
                pct_includes_hispanic_black=False,
                known_race_cases=known_race_cases,
                known_race_deaths=known_race_deaths,
            )
        ]
 def __init__(self, vql_response):
     """Store the raw VQL response and locate the first section that
     contains flipboard data (None when no such section exists)."""
     self.vql_response = vql_response
     sections = pydash.get(vql_response, self.SECTION_KEY, {})
     flipboard_sections = [
         section for section in sections.values()
         if pydash.get(section, 'presModelHolder.flipboard')
     ]
     self.section_with_data = pydash.head(flipboard_sections)
 def __init__(self, viz_data, json_data):
     """Keep the viz/json payloads and pick the first section holding
     flipboard data (None when none matches)."""
     self.viz_data = viz_data
     self.json_data = json_data
     candidates = [
         section
         for section in pydash.get(viz_data, self.SECTION_KEY, {}).values()
         if pydash.get(section, 'presModelHolder.flipboard')
     ]
     self.section_with_data = pydash.head(candidates)
Beispiel #6
0
def update_user(id, name, game_name, unique_id, speaking, is_me, my_channel):
	"""Replace the cached user entry for *id* and announce the modification.

	Does nothing when no user with *id* is currently cached.
	"""
	existing = _.head(_users.where(id=id))
	if not existing:
		return
	_users.remove(existing)
	replacement = DataObject(
		id=id,
		name=name,
		game_name=game_name,
		unique_id=unique_id,
		speaking=speaking,
		is_me=is_me,
		my_channel=my_channel,
	)
	_users.insert(replacement)
	messages.publish(UserMessage("modified", replacement))
Beispiel #7
0
def main():
    """CLI entry point: parse flags/options from ``sys.argv``, assemble
    immutable parameter maps, and launch the global ``Runner``.

    NOTE(review): relies on module-level ``args_with_values`` and assigns
    the module-level ``runner`` — confirm both are defined at import time.
    """
    global runner
    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    load_dotenv(dotenv_path='.env')
    # Boolean switches accepted without a value.
    flag_argnames = [
        'load_model', 'use_adaptive_softmax', 'use_fast_sampler',
        'dont_use_hardcoded_cutoffs', 'use_ranking_loss',
        'dont_use_deep_network', 'use_cnn_local', 'use_lstm_local',
        'dont_freeze_word_embeddings', 'dont_continue_training', 'cheat',
        'use_conll', 'use_wiki2vec'
    ]
    # getopt returns (option_pairs, remainder); keep only the pairs.
    args = getopt.getopt(
        _.tail(sys.argv), '',
        flag_argnames + [arg['name'] + '=' for arg in args_with_values])[0]
    flags = [_.head(arg) for arg in args]
    # `m()` builds immutable maps; presence of a '--flag' toggles defaults.
    train_params = m(use_fast_sampler='--use_fast_sampler' in flags)
    run_params = m(load_model='--load_model' in flags,
                   cheat='--cheat' in flags,
                   continue_training='--dont_continue_training' not in flags,
                   use_conll='--use_conll' in flags)
    model_params = m(use_adaptive_softmax='--use_adaptive_softmax' in flags,
                     use_hardcoded_cutoffs='--dont_use_hardcoded_cutoffs'
                     not in flags,
                     use_ranking_loss='--use_ranking_loss' in flags,
                     use_cnn_local='--use_cnn_local' in flags,
                     use_lstm_local='--use_lstm_local' in flags,
                     use_deep_network='--dont_use_deep_network' not in flags,
                     freeze_word_embeddings='--dont_freeze_word_embeddings'
                     not in flags,
                     use_wiki2vec='--use_wiki2vec' in flags)
    paths = m(lookups=os.getenv("LOOKUPS_PATH"),
              page_id_order=os.getenv("PAGE_ID_ORDER_PATH"))
    # Overlay any valued options onto the parameter group they belong to.
    for arg in args_with_values:
        name = arg['name']
        # NOTE(review): substring match — an option whose name contains
        # another option's name could be picked up here; verify that the
        # declared names are prefix-free.
        pair = _.find(args, lambda pair: name in pair[0])
        if pair:
            parsed = arg['type'](pair[1])
            if arg['for'] == 'path':
                paths = paths.set(name, parsed)
            elif arg['for'] == 'model_param':
                model_params = model_params.set(name, parsed)
            elif arg['for'] == 'train_param':
                train_params = train_params.set(name, parsed)
            elif arg['for'] == 'run_param':
                run_params = run_params.set(name, parsed)
            else:
                raise ValueError(
                    '`args_with_values` contains unsupported param group ' +
                    arg['for'])
    name_pair = _.find(args, lambda pair: 'name' in pair[0])
    name = name_pair[1] if name_pair else ''
    runner = Runner(device=device,
                    paths=paths,
                    train_params=train_params,
                    model_params=model_params,
                    run_params=run_params)
    runner.run()
Beispiel #8
0
def set_players_speaking(player_ids, speaking):
	"""Sync the speaking-players table with *speaking* for every given id,
	publishing 'added'/'removed' messages for each change."""
	for pid in player_ids:
		current = _.head(_speaking_players.where(id=pid))
		if speaking and not current:
			added = DataObject(id=pid)
			_speaking_players.insert(added)
			messages.publish(PlayerSpeakingMessage("added", added))
		elif not speaking and current:
			_speaking_players.remove(current)
			messages.publish(PlayerSpeakingMessage("removed", current))
Beispiel #9
0
def set_players_speaking(player_ids, speaking):
    """Insert or remove speaking markers so they match *speaking* for each
    player id, announcing every change."""
    for player_id in player_ids:
        existing = _.head(_speaking_players.where(id=player_id))
        should_add = speaking and not existing
        should_remove = (not speaking) and bool(existing)
        if should_add:
            marker = DataObject(id=player_id)
            _speaking_players.insert(marker)
            messages.publish(PlayerSpeakingMessage("added", marker))
        elif should_remove:
            _speaking_players.remove(existing)
            messages.publish(PlayerSpeakingMessage("removed", existing))
Beispiel #10
0
def update_user(id, name, game_name, unique_id, speaking, is_me, my_channel):
    """Rebuild the stored user record for *id* and publish a 'modified'
    event; no-op when the user is not cached."""
    cached = _.head(_users.where(id=id))
    if cached:
        _users.remove(cached)
        fields = dict(id=id, name=name, game_name=game_name,
                      unique_id=unique_id, speaking=speaking, is_me=is_me,
                      my_channel=my_channel)
        updated = DataObject(**fields)
        _users.insert(updated)
        messages.publish(UserMessage("modified", updated))
Beispiel #11
0
def self_desc(cls):
    '''Method to get self description, used at init.'''
    lines = [f'{get_class_name(cls)}:']
    for attr_name, attr_val in get_class_attr(cls).items():
        if attr_name == 'spec':
            shown = attr_val['name']
        elif ps.is_dict(attr_val) or ps.is_dict(ps.head(attr_val)):
            shown = to_json(attr_val)
        else:
            shown = attr_val
        lines.append(f'- {attr_name} = {shown}')
    return '\n'.join(lines)
Beispiel #12
0
def insert_pairing(user_unique_id, player_id):
	"""Create a user/player pairing (when missing), refresh both name
	caches, and notify listeners."""
	if _pairings.where(user_unique_id=user_unique_id, player_id=player_id):
		return
	# Insert pairing
	pairing = DataObject(user_unique_id=user_unique_id, player_id=player_id)
	_pairings.insert(pairing)
	# Insert pair's user to cache
	paired_user = _.head(_users.where(unique_id=user_unique_id))
	if paired_user:
		_cached_users.delete(unique_id=paired_user.unique_id)
		_cached_users.insert(DataObject(unique_id=paired_user.unique_id, name=paired_user.name))
	# Insert pair's player to cache: search battle, prebattle, then me
	found = (_battle_players.where(id=player_id)
		or _prebattle_players.where(id=player_id)
		or _me_player.where(id=player_id))
	paired_player = _.head(found)
	if paired_player:
		_cached_players.delete(id=paired_player.id)
		_cached_players.insert(DataObject(id=paired_player.id, name=paired_player.name))
	# Notify listeners
	messages.publish(PairingMessage("added", pairing))
Beispiel #13
0
def insert_pairing(user_unique_id, player_id):
    """Add a pairing between a TS user and a game player, updating the
    name caches and publishing an 'added' PairingMessage."""
    already_paired = _pairings.where(user_unique_id=user_unique_id,
                                     player_id=player_id)
    if already_paired:
        return
    new_pairing = DataObject(user_unique_id=user_unique_id,
                             player_id=player_id)
    _pairings.insert(new_pairing)
    # Cache the user's current name for later lookups.
    user = _.head(_users.where(unique_id=user_unique_id))
    if user:
        _cached_users.delete(unique_id=user.unique_id)
        _cached_users.insert(
            DataObject(unique_id=user.unique_id, name=user.name))
    # Cache the player's name, checking battle, prebattle, then self.
    matches = _battle_players.where(id=player_id)
    if not matches:
        matches = _prebattle_players.where(id=player_id)
    if not matches:
        matches = _me_player.where(id=player_id)
    player = _.head(matches)
    if player:
        _cached_players.delete(id=player.id)
        _cached_players.insert(DataObject(id=player.id, name=player.name))
    messages.publish(PairingMessage("added", new_pairing))
Beispiel #14
0
def describe(cls):
    """Build a multi-line description of *cls*: its name followed by one
    '- key = value' line per class attribute.

    The '_config' attribute contributes only its 'name' entry (skipped when
    absent); dict attributes — or lists whose first element is a dict — are
    pretty-printed.
    """
    desc_list = [f'{get_cls_name(cls)}:']
    for k, v in get_cls_attr(cls).items():
        if k == '_config':
            try:
                desc_v = v['name']
            # FIX: was a bare `except:`, which also traps SystemExit and
            # KeyboardInterrupt; only the expected lookup failures are
            # swallowed now.
            except (TypeError, KeyError, IndexError):
                continue
        elif ps.is_dict(v) or ps.is_dict(ps.head(v)):
            desc_v = pformat(v)
        else:
            desc_v = v
        desc_list.append(f'- {k} = {desc_v}')
    return '\n'.join(desc_list)
    def all(cls, *args):
        """Combine several Result values.

        Returns the first Error among *args*; otherwise an Error when any
        argument is not a Result (i.e. not an Ok once Errors are excluded);
        otherwise an Ok wrapping the list of unwrapped values.  An empty
        argument list yields Ok([]).
        """
        try:
            results = list(args)
            # First failure wins.
            if some(results, lambda result: isinstance(result, Error)):
                return head(
                    filter_(results,
                            lambda result: isinstance(result, Error)))

            # BUG FIX: previously `every(..., isinstance(r, Ok) == False)`,
            # which only fired when *no* argument was an Ok — a mixed list of
            # Oks and non-Results slipped through and crashed on .getValue().
            # After the Error check above, any non-Ok is not a Result.
            if some(results, lambda result: not isinstance(result, Ok)):
                return Error(
                    Exception('Some items passed in were not a Result.'))

            return Ok(map_(results, lambda result: result.getValue()))
        except Exception as e:
            return Error(e)
Beispiel #16
0
def self_desc(cls, omit=None):
    '''Method to get self description, used at init.'''
    lines = [f'{get_class_name(cls)}:']
    skipped = ps.compact(cast_list(omit))
    for name, val in get_class_attr(cls).items():
        if name in skipped:
            continue
        if name == 'spec':
            # spec components are described at their object level; for
            # session, only describe spec.meta
            rendered = pformat(val['meta'])
        elif ps.is_dict(val) or ps.is_dict(ps.head(val)):
            rendered = pformat(val)
        else:
            rendered = val
        lines.append(f'- {name} = {rendered}')
    return '\n'.join(lines)
Beispiel #17
0
def get_cli_args(args):
    """Parse ``sys.argv`` against the *args* spec list and return an
    immutable map with ``train``, ``run``, and ``model`` parameter groups.

    Each spec dict has 'name', 'type' ('flag' or a conversion callable),
    'default', and 'for' (parameter group).  For every flag a negated twin
    (via ``_flip_dont``) with the inverted default is added, so both forms
    of the switch are accepted.

    NOTE(review): ``paths`` is populated but never returned — confirm
    whether path args are consumed elsewhere or silently dropped.
    """
    # Add the inverted twin of every flag argument.
    args = args + [
        _.defaults(
            {
                'name': _flip_dont(arg['name']),
                'default': not arg['default']
            }, arg) for arg in args if arg['type'] == 'flag'
    ]
    args_with_values = list(filter(lambda arg: arg['type'] != 'flag', args))
    flag_argnames = [
        arg['name'] for arg in filter(lambda arg: arg['type'] == 'flag', args)
    ]
    # getopt returns (option_pairs, remainder); only the pairs are needed.
    cli_args = getopt.getopt(
        _.tail(sys.argv), '',
        flag_argnames + [arg['name'] + '=' for arg in args_with_values])[0]
    flags = [_.head(arg) for arg in cli_args]
    train_params, run_params, model_params = m(), m(), m()
    paths = m()
    for arg in args:
        name = arg['name']
        # NOTE(review): substring match — names that are substrings of other
        # option names may collide; verify arg names are prefix-free.
        pair = _.find(cli_args, lambda pair: name in pair[0])
        if pair:
            if arg['type'] == 'flag':
                val = '--' + arg['name'] in flags
            else:
                val = arg['type'](pair[1])
        else:
            val = arg['default']
        if arg['for'] == 'path':
            paths = paths.set(name, val)
        elif arg['for'] == 'model_params':
            model_params = model_params.set(name, val)
        elif arg['for'] == 'train_params':
            train_params = train_params.set(name, val)
        elif arg['for'] == 'run_params':
            run_params = run_params.set(name, val)
        else:
            raise ValueError(
                '`args_with_values` contains unsupported param group ' +
                arg['for'])
    return m(train=train_params, run=run_params, model=model_params)
Beispiel #18
0
def migrate_user_cache_0_6_to_0_7(source_dirpath, dest_dirpath):
	"""
	This function migrates tessu_mod_cache.ini to following files:
	 * users_cache.v1.json
	 * players_cache.v1.json
	 * pairings_cache.v1.json

	The old ini file is renamed to tessu_mod_cache.ini.old-0.6 afterwards.
	Does nothing when the source file is missing or all destination files
	already exist.
	"""
	source_filepath   = os.path.join(source_dirpath, "tessu_mod_cache.ini")
	users_filepath    = os.path.join(dest_dirpath, constants.USERS_CACHE_FILE)
	players_filepath  = os.path.join(dest_dirpath, constants.PLAYERS_CACHE_FILE)
	pairings_filepath = os.path.join(dest_dirpath, constants.PAIRINGS_CACHE_FILE)
	backup_filepath   = os.path.join(dest_dirpath, "tessu_mod_cache.ini.old-0.6")

	source_exists = os.path.isfile(source_filepath)
	dest_exists = all(map(os.path.isfile, [users_filepath, players_filepath, pairings_filepath]))

	if source_exists and not dest_exists:
		logger.info("Migrating caches from version 0.6 to 0.7")

		# Schema for new caches
		users = Table()
		users.create_index('unique_id', unique=True)
		players = Table()
		players.create_index('id', unique=True)
		pairings = Table()
		pairings.create_index('player_id')
		pairings.create_index('user_unique_id')

		# Load old 0.6.x cache file
		parser = ConfigParser.ConfigParser()
		with open(source_filepath, "rb") as file:
			parser.readfp(file)

		# Build new cache structures
		users.insert_many(DataObject(unique_id=id, name=name) for name, id in parser.items("TeamSpeakUsers"))
		players.insert_many(DataObject(id=int(id), name=name) for name, id in parser.items("GamePlayers"))
		for user_name, player_names in parser.items("UserPlayerPairings"):
			userid = _.head(users.where(name=user_name)).unique_id
			# Pairing values are csv lists of player names
			for player_name in list(csv.reader([player_names]))[0]:
				playerid = _.head(players.where(name=player_name)).id
				pairings.insert(DataObject(player_id=int(playerid), user_unique_id=userid))

		# Remove users & players which do not exist in pairings
		for user in users.clone():
			if not pairings.where(user_unique_id=user.unique_id):
				users.remove(user)
		for player in players.clone():
			if not pairings.where(player_id=player.id):
				players.remove(player)

		# create destination directory if it doesn't exist yet
		if not os.path.isdir(dest_dirpath):
			os.makedirs(dest_dirpath)

		# write out the new cache files
		users.json_export(users_filepath)
		players.json_export(players_filepath)
		pairings.json_export(pairings_filepath)

		# Backup and remove old cache file.  FIX: backup_filepath was
		# previously recomputed here with an equivalent expression,
		# shadowing the value assigned at the top of the function; the
		# dead recomputation has been removed.
		if os.path.isfile(backup_filepath):
			os.remove(backup_filepath)
		os.rename(source_filepath, backup_filepath)
 def extract_rows(self, json_data):
     """Return the row data of the first datasource in *json_data*.

     Mirrors pydash.head semantics: an empty datasource mapping yields a
     None key and therefore a KeyError.
     """
     datasources = json_data['chart']['datasources']
     first_key = next(iter(datasources), None)
     return datasources[first_key]['data']['rows']
Beispiel #20
0
def remove_user(id):
    """Drop the user with *id* from the cache and publish a 'removed'
    event; no-op when the user is not cached."""
    user = _.head(_users.where(id=id))
    if user:
        _users.remove(user)
        messages.publish(UserMessage("removed", user))
Beispiel #21
0
def get_my_player_id():
	"""Return the id of the 'me' player, or None when not present."""
	me = _.head(_me_player)
	return me.id if me else None
 def column(self) -> Optional[ColumnLabel]:
     """Return the first ColumnLabel among this object's labels, or None."""
     return next(
         (label for label in self.labels if isinstance(label, ColumnLabel)),
         None)
Beispiel #23
0
def get_my_vehicle():
	"""Return the vehicle owned by the 'me' player, or None."""
	me = _.head(_me_player)
	if not me:
		return None
	return _.head(_vehicles.where(player_id=me.id))
Beispiel #24
0
def remove_vehicle(id):
    """Remove the vehicle with *id* (if cached) and announce the removal."""
    target = _.head(_vehicles.where(id=id))
    if not target:
        return
    _vehicles.remove(target)
    messages.publish(VehicleMessage("removed", target))
 def surface(self) -> Optional[SurfaceLabel]:
     """Return the first SurfaceLabel among this object's labels, or None."""
     surface_labels = [
         label for label in self.labels if isinstance(label, SurfaceLabel)
     ]
     return surface_labels[0] if surface_labels else None
Beispiel #26
0
def get_my_vehicle():
    """Look up the 'me' player's vehicle, returning None when absent."""
    me = _.head(_me_player)
    return _.head(_vehicles.where(player_id=me.id)) if me else None
Beispiel #27
0
def get_player_vehicle(player_id):
    """Return the first vehicle belonging to *player_id*, or None."""
    matches = _vehicles.where(player_id=player_id)
    return _.head(matches)
Beispiel #28
0
def get_my_player_id():
    """Return the current player's id when the 'me' entry exists."""
    me_entry = _.head(_me_player)
    if not me_entry:
        return None
    return me_entry.id
Beispiel #29
0
def remove_vehicle(id):
	"""Delete the cached vehicle with *id*, if any, and publish the removal."""
	doomed = _.head(_vehicles.where(id=id))
	if not doomed:
		return
	_vehicles.remove(doomed)
	messages.publish(VehicleMessage("removed", doomed))
Beispiel #30
0
def remove_prebattle_player(id):
	"""Remove *id* from the prebattle roster and broadcast the change."""
	departing = _.head(_prebattle_players.where(id=id))
	if not departing:
		return
	_prebattle_players.remove(departing)
	messages.publish(PrebattlePlayerMessage("removed", departing))
Beispiel #31
0
    def _scrape(self, **kwargs):
        """Scrape two Tableau dashboards (cases, deaths) with explicit
        sanity-check assertions on each extracted field, returning
        ``[self._make_series(...)]``.

        NOTE(review): the assert-based validation disappears under
        ``python -O``; consider raising exceptions if that matters here.
        """
        runner = WebdriverRunner()
        cases_results = runner.run(WebdriverSteps().go_to_url(
            self.CASES_URL).find_request(key='cases',
                                         find_by=find_tableau_request))

        deaths_results = runner.run(WebdriverSteps().go_to_url(
            self.DEATHS_URL).find_request(key='deaths',
                                          find_by=find_tableau_request))

        assert cases_results.requests['cases'], 'No results found for `cases`'
        resp_body = cases_results.requests['cases'].response.body.decode(
            'utf8')
        tableau_parser = TableauParser(resp_body)

        # Report date is published as mm/dd/yyyy.
        parsed_date = tableau_parser.extract_data_from_key('Date Updated')
        assert 'Date Updated' in parsed_date, 'Unable to parse date'
        assert len(parsed_date['Date Updated']) == 1, 'Unable to parse date'
        date_str = pydash.head(parsed_date['Date Updated'])
        date = datetime.strptime(date_str, '%m/%d/%Y').date()

        parsed_num_cases = tableau_parser.extract_data_from_key(
            'Number of Cases')
        assert 'SUM(Number of Records)' in parsed_num_cases, 'Key not found, unable to parse number of records'
        assert len(parsed_num_cases['SUM(Number of Records)']
                   ) == 1, 'Parsing error might have occurred'
        cases = pydash.head(parsed_num_cases['SUM(Number of Records)'])

        # Race/ethnicity breakdown indexed by the 'Raceeth' label.
        parsed_race_eth = tableau_parser.extract_data_from_key(
            'Race/Ethnicity Epi')
        parsed_race_eth_df = pd.DataFrame.from_dict(parsed_race_eth).set_index(
            'Raceeth')
        aa_cases = parsed_race_eth_df.loc['Black, non-Hispanic'][
            'AGG(RecordCount)']
        # Exclude unknown-race records from the percentage denominator.
        known_race_cases = cases - parsed_race_eth_df.loc['Unknown'][
            'AGG(RecordCount)']

        assert deaths_results.requests[
            'deaths'], 'No results found for `deaths`'
        resp_body = deaths_results.requests['deaths'].response.body.decode(
            'utf8')
        tableau_parser = TableauParser(resp_body)
        parsed_death_cases = tableau_parser.extract_data_from_key(
            'Number of deaths')
        assert 'SUM(Death count)' in parsed_death_cases, 'Death count not found'
        assert len(parsed_death_cases['SUM(Death count)']
                   ) == 1, 'Parsing error might have occurred.'
        deaths = pydash.head(parsed_death_cases['SUM(Death count)'])

        parsed_deaths_by_race = tableau_parser.extract_data_from_key(
            'Death Race/Ethnicity')
        parsed_deaths_by_race_df = pd.DataFrame.from_dict(
            parsed_deaths_by_race).set_index('Raceeth')
        aa_deaths = parsed_deaths_by_race_df.loc['Black, non-Hispanic'][
            'AGG(RecordCount)']
        known_race_deaths = deaths - parsed_deaths_by_race_df.loc['Unknown'][
            'AGG(RecordCount)']

        pct_aa_cases = misc.to_percentage(aa_cases, known_race_cases)
        pct_aa_deaths = misc.to_percentage(aa_deaths, known_race_deaths)

        return [
            self._make_series(date=date,
                              cases=cases,
                              deaths=deaths,
                              aa_cases=aa_cases,
                              aa_deaths=aa_deaths,
                              pct_aa_cases=pct_aa_cases,
                              pct_aa_deaths=pct_aa_deaths,
                              pct_includes_unknown_race=False,
                              pct_includes_hispanic_black=False,
                              known_race_cases=known_race_cases,
                              known_race_deaths=known_race_deaths)
        ]
Beispiel #32
0
def remove_prebattle_player(id):
    """Drop the prebattle player with *id*, publishing a 'removed' message."""
    target = _.head(_prebattle_players.where(id=id))
    if not target:
        return
    _prebattle_players.remove(target)
    messages.publish(PrebattlePlayerMessage("removed", target))
    def _scrape(self, **kwargs):
        """Scrape the landing page for the report date, then two Tableau
        story points for case/death totals and race percentages; returns
        ``[self._make_series(...)]``.
        """
        runner = WebdriverRunner()

        # The report date lives on the home page, not in the dashboard.
        results = runner.run(WebdriverSteps().go_to_url(
            self.HOME_URL).get_page_source())
        date = self.parse_date(results.page_source)

        # Capture the 'cases' Tableau request, then click through to the
        # deaths story point and capture its 'set-active-story-point'
        # request.  The canvas/span waits ensure each request has fired
        # before it is captured.
        results = runner.run(WebdriverSteps().go_to_url(
            self.URL
        ).wait_for_number_of_elements((By.XPATH, '//canvas'), 32).find_request(
            'cases', tableau.find_tableau_request
        ).clear_request_history().find_element_by_xpath(
            '//*[@id="tabZoneId4"]/div/div/div/span[2]/div/span/span/span[2]/div[2]/div'
        ).click_on_last_element_found().wait_for_number_of_elements(
            (By.XPATH, "//span[contains(text(), 'Deaths')]"), 6).find_request(
                'deaths',
                find_by=lambda r: 'set-active-story-point' in r.path))

        parser = tableau.TableauParser(request=results.requests['cases'])
        cases = pydash.head(
            parser.extract_data_from_key('Cases')['SUM(Number of Records)'])
        # Race breakdown is published as fractions; convert to approximate
        # counts by scaling against the case total.
        cases_pct_df = pd.DataFrame.from_dict(
            parser.extract_data_from_key('Race_Cases')).set_index(
                'Assigned Race')
        cases_df = cases_pct_df.assign(Count=[
            round(v * cases)
            for v in cases_pct_df['SUM(Number of Records)'].values
        ])
        aa_cases = cases_df.loc['Black']['Count']
        known_race_cases = cases - cases_df.loc['Unknown']['Count']

        parser = tableau.TableauParser(request=results.requests['deaths'])
        deaths = pydash.head(
            parser.extract_data_from_key('NumberDeaths')
            ['SUM(Number of Records)'])
        # Deaths by race are fractions as well; scale by the death total.
        deaths_pct_df = pd.DataFrame.from_dict(
            parser.extract_data_from_key('Race_Deaths')).set_index(
                'Assigned Race')
        deaths_df = deaths_pct_df.assign(Count=[
            round(v * deaths)
            for v in deaths_pct_df['SUM(Number of Records)'].values
        ])
        aa_deaths = deaths_df.loc['Black']['Count']
        known_race_deaths = deaths - deaths_df.loc['Unknown']['Count']

        pct_aa_cases = misc.to_percentage(aa_cases, known_race_cases)
        pct_aa_deaths = misc.to_percentage(aa_deaths, known_race_deaths)

        return [
            self._make_series(date=date,
                              cases=cases,
                              deaths=deaths,
                              aa_cases=aa_cases,
                              aa_deaths=aa_deaths,
                              pct_aa_cases=pct_aa_cases,
                              pct_aa_deaths=pct_aa_deaths,
                              pct_includes_unknown_race=False,
                              pct_includes_hispanic_black=True,
                              known_race_cases=known_race_cases,
                              known_race_deaths=known_race_deaths)
        ]
Beispiel #34
0
def get_player_vehicle(player_id):
	"""Return the first vehicle registered to *player_id*, or None."""
	owned = _vehicles.where(player_id=player_id)
	return _.head(owned)
Beispiel #35
0
def test_head(case, expected):
    """Check that pydash head yields *expected* for *case*."""
    actual = _.head(case)
    assert actual == expected
Beispiel #36
0
def remove_user(id):
	"""Remove the cached user with *id*, if any, and publish the removal."""
	departing = _.head(_users.where(id=id))
	if departing:
		_users.remove(departing)
		messages.publish(UserMessage("removed", departing))