Example #1
def _get_alerts(dbclient: DBClient, args):
    rows = dbclient.get_alerts_by_user(
        args.user_id) if args.user_id else dbclient.get_alerts_all()

    print(header_alerts)
    for alert in rows:
        print(alert2str(alert))
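
The snippet assumes `header_alerts` and `alert2str` from the surrounding module; a minimal sketch of what they might look like (the column layout is purely illustrative):

header_alerts = 'id | user_id | timestamp | message'

def alert2str(alert):
    # Render one DB row as a pipe-separated line; the real field set
    # depends on the alerts schema, which this sketch only assumes.
    return ' | '.join(str(field) for field in alert)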
Example #2
    def _init_db(self):
        if not self._owned_games_db:
            self._owned_games_db = DBClient(self._client.owned_games_db_path)

        if not self._local_games_db:
            self._local_games_db = DBClient(
                self._client.installed_games_db_path)
Example #3
def main():

    logger = get_root_logger()
    get_header(logger, 'LOADING PROJECTIONS')

    client = APIClient()

    # grab dataframe shape from a trial run
    data = client.get_data('weekly-projections', 'json', 'QB')
    test_df = json_normalize(data['Projections'])

    # get DF structure from columns in test_df
    cols = test_df.columns
    df = DataFrame(columns=cols)

    # grab current week
    current_week = test_df.week.values[0]

    # loop through all weeks up to the current week
    # NOTE: the API call below receives 0-based week strings, while the log
    # message reports weeks 1-based
    for wk in [str(x) for x in range(int(current_week))]:
        logger.info('Processing projections for week {0}'.format(int(wk) + 1))
        # loop through all positions
        for pos in ['QB', 'RB', 'WR', 'TE', 'K', 'DEF']:
            tmp_data = client.get_data('weekly-projections', 'json', pos, wk)
            tmp_df = json_normalize(tmp_data['Projections'])
            df = df.append(tmp_df)

    # import this df directly to PG DB
    conn = DBClient()
    conn.load(df, 'projections', schema='raw', if_exists='replace')
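
Note: `DataFrame.append` (used here and in Examples #8 and #10) was deprecated in pandas 1.4 and removed in 2.0. On newer pandas the same accumulation is typically written by collecting frames and concatenating once, assuming `import pandas as pd`:

frames = []
for wk in [str(x) for x in range(int(current_week))]:
    for pos in ['QB', 'RB', 'WR', 'TE', 'K', 'DEF']:
        tmp_data = client.get_data('weekly-projections', 'json', pos, wk)
        frames.append(json_normalize(tmp_data['Projections']))
df = pd.concat(frames, ignore_index=True)  # one concat instead of repeated append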
Example #4
def _get_events(dbclient: DBClient, args):
    rows = dbclient.get_events_by_user(
        args.user_id) if args.user_id else dbclient.get_events_all()

    print(header_events)
    for event in rows:
        print(event2str(event))
Example #5
def _delete_rule(dbclient: DBClient, args):
    if not args.rule_id:
        print('rule_id argument is missing')
        sys.exit(1)

    print(f'Deleting rule \'{args.rule_id}\'...')
    dbclient.delete_rule(args.rule_id)
    print('Done.')
Example #6
def _add_event(dbclient: DBClient, args):
    if not (args.user_id and args.ts and args.type and args.description):
        logging.error('Invalid arguments')
        sys.exit(1)

    timestamp = datetime.fromisoformat(args.ts).astimezone()
    dbclient.insert_event(args.user_id, timestamp, args.type, args.description)
    logging.info('OK')
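
A note on the timestamp handling in `_add_event`: for a naive ISO-8601 string, `datetime.fromisoformat` returns a naive datetime, and calling `.astimezone()` with no argument (Python 3.6+) interprets it as local time and attaches the local timezone:

from datetime import datetime

ts = datetime.fromisoformat('2021-06-01T10:00:00')   # naive datetime
aware = ts.astimezone()   # interpreted as local time, now tz-aware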
Example #7
def _add_rule(dbclient: DBClient, args):
    spec = None

    with open(args.rule_spec) as spec_file:
        spec = json.load(spec_file)

    expr = '\n'.join(spec.get('expr'))

    if _rule_exists(spec.get('id'), dbclient.get_rules()):
        print(f'Rule \'{spec.get("id")}\' already exists.')
        sys.exit(1)

    print(f'Adding rule \'{spec.get("id")}\'...')
    dbclient.insert_rule(spec.get('id'), spec.get('priority'),
                         spec.get('summary'), expr, spec.get('msg'))
    print('Done.')
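
For reference, a rule spec consistent with the fields `_add_rule` reads (`id`, `priority`, `summary`, `expr` as a list of lines joined with newlines, and `msg`) might look like the following; all values are illustrative only:

{
    "id": "inactivity",
    "priority": 1,
    "summary": "No activity recorded in the last 24 hours",
    "expr": [
        "events = get_events(user_id)",
        "len(events) == 0"
    ],
    "msg": "No recent activity for this user"
}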
Example #8
def main():

    logger = get_root_logger()
    get_header(logger, "Importing Bye weeks")

    client = APIClient()
    data = client.get_data("byes")

    df = None
    for key in data.keys():
        # build DF the first time through the loop
        if df is None:
            df = json_normalize(data[key])
        # append every other time
        else:
            df = df.append(json_normalize(data[key]))

    # import this df directly to PG DB
    conn = DBClient()
    conn.load(df, "byes", schema="raw", if_exists="replace")
Example #9
    def process(self, request):
        start_time = time.time()

        client = DBClient()
        client.connect()

        req = db_pb2.DBRequest()
        res = db_pb2.DBResponse()
        req.ParseFromString(request)
        res.seq_id = req.seq_id

        status = -1
        for query in req.query:
            logger.debug("process query[sql=%s]" % query)
            result = res.result.add()
            if query.type == Query.SELECT:
                status = self._select(client, query, result)
            elif query.type == Query.INSERT:
                status = self._insert(client, query, result)
            elif query.type == Query.UPDATE:
                status = self._update(client, query, result)
            elif query.type == Query.DELETE:
                status = self._delete(client, query, result)

            if status != 0:
                break

        client.commit()

        res.status = status
        consume_time = int((time.time() - start_time) * 1000)
        logger.notice("Query[req=%s][res=%s][consume=%d ms]" % (req, res, consume_time))

        return res.SerializeToString()
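
A sketch of how a caller might drive `process`, based only on the fields visible above (`seq_id`, repeated `query`, repeated `result`, `status`); the `sql` field name on `Query` is an assumption, since the snippet only hints at it in the log format:

req = db_pb2.DBRequest()
req.seq_id = 1
query = req.query.add()
query.type = Query.SELECT
query.sql = 'SELECT 1'  # field name assumed from the log format above

payload = handler.process(req.SerializeToString())  # `handler` is the object defining process()
res = db_pb2.DBResponse()
res.ParseFromString(payload)
print(res.status, res.result)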
Example #10
def main():
    
    path = '{0}/data/salaries/*'.format(getenv('BASEDIR'))
    files_list = glob(path)
    
    for i, f in enumerate(files_list):
        df = read_csv(f, sep=';')
        df.rename(columns={'h/a': 'homeoraway',
                           'DK points': 'dk_proj_points',
                           'DK salary': 'dk_salary'},
                  inplace=True)

        # first iteration, create a new df
        if i == 0:
            all_df = df
        # subsequent iterations, append to it
        else:
            all_df = all_df.append(df)
        
    conn = DBClient()
    conn.load(all_df, 'salaries', if_exists='replace', schema='raw')
Example #11
def _scenario_pro_deterioration(dbclient: DBClient):
    timestamp = datetime.now().astimezone()
    dbclient.insert_event('1', timestamp - timedelta(days=2), 'PRO',
                          '10 - Low')
    dbclient.insert_event('1', timestamp - timedelta(days=1), 'PRO',
                          '15 - Mid')
    dbclient.insert_event('1', timestamp - timedelta(days=0), 'PRO',
                          '20 - High')
Example #12
def main():
    args = parse_args()

    if args.debug:
        logging.getLogger().setLevel(logging.DEBUG)

    config = None
    with open(args.config) as config_file:
        config = json.load(config_file)

    db = DBClient(config)

    command = commands.get(args.command)

    if command:
        command(db, args)
    else:
        logging.error(f'command {args.command} is not supported')
        sys.exit(1)
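
The `commands` mapping is not shown in this example; a plausible sketch wiring up the subcommand handlers that appear elsewhere in this collection (the key names are assumed):

commands = {
    'get-alerts': _get_alerts,
    'get-events': _get_events,
    'get-rules': _get_rules,
    'add-event': _add_event,
    'add-rule': _add_rule,
    'delete-rule': _delete_rule,
    'clear-all': _clear_all,
    'clear-events': _clear_events,
    'clear-alerts': _clear_alerts,
}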
Example #13
    def __init__(self):

        # Clients
        self.db = DBClient()
        self.registry = RegistryClient()

        # All setflag locks in the current tick
        self.setflag_locks = []

        # A lock that guards self.setflag_locks
        self.setflag_locks_list_lock = threading.Lock()

        # Logs
        self.log = logging.getLogger('scriptbot.scheduler')
        self.log.setLevel(settings.LOG_LEVEL)
        self.log.addHandler(
            logstash.TCPLogstashHandler(LOGSTASH_IP, LOGSTASH_PORT, version=1))
        self.log.info('#' * 80)
        self.log.info("Initialization")
        self.log.info('#' * 80)
Example #14
    def _init_db(self):
        if not self._owned_games_db and self._client.is_installed:
            self._owned_games_db = DBClient(self._client.owned_games_db_path)
Example #15
    def load(self, df, table):

        # import to DB
        conn = DBClient()
        conn.load(df, table, schema='raw', if_exists='replace')
        self.logger.info('Successfully loaded {0} rows into {1}'.format(len(df), table))
Example #16
    def __init__(self):
        self.db = DBClient()
Example #17
class AmazonGamesPlugin(Plugin):
    _owned_games_db = None
    _local_games_cache = {}
    _owned_games_cache = {}
    _client = None

    def __init__(self, reader, writer, token):
        super().__init__(Platform.Amazon, __version__, reader, writer, token)
        self.logger = logging.getLogger('amazonPlugin')
        self._client = AmazonGamesClient()

        self.logger.info("Plugin __init__")

    def _init_db(self):
        if not self._owned_games_db and self._client.is_installed:
            self._owned_games_db = DBClient(self._client.owned_games_db_path)

    def _on_auth(self):
        self.logger.info("Auth finished")
        self._init_db()

        self.store_credentials({'creds': 'dummy_data_because_local_app'})
        return Authentication('amazon_user_id', 'Amazon Games User')

    def _get_owned_games(self):
        try:
            return {
                row['ProductIdStr']:
                Game(row['ProductIdStr'],
                     row['ProductTitle'],
                     dlcs=None,
                     license_info=LicenseInfo(LicenseType.SinglePurchase))
                for row in self._owned_games_db.select(
                    'DbSet', rows=['ProductIdStr', 'ProductTitle'])
            }
        except Exception:
            self.logger.exception('Failed to get owned games')
            return {}

    def _update_owned_games_cache(self):
        owned_games = self._get_owned_games()

        for game_id in self._owned_games_cache.keys() - owned_games.keys():
            self.remove_game(game_id)

        for game_id in (owned_games.keys() - self._owned_games_cache.keys()):
            self.add_game(owned_games[game_id])

        self._owned_games_cache = owned_games

    def _get_local_games(self):
        try:
            return {
                row['game_id']: LocalGame(row['game_id'],
                                          LocalGameState.Installed)
                for row in self._client.get_installed_games()
            }
        except Exception:
            self.logger.exception('Failed to get local games')
            return {}

    def _update_local_games_state(self):
        local_games = self._get_local_games()

        for game_id in self._local_games_cache.keys() - local_games.keys():
            self.update_local_game_status(
                LocalGame(game_id, LocalGameState.None_))

        for game_id, local_game in local_games.items():
            old_game = self._local_games_cache.get(game_id)
            if old_game is None or old_game.local_game_state != local_game.local_game_state:
                self.update_local_game_status(local_game)

        self._local_games_cache = local_games

    @staticmethod
    def _scheme_command(command, game_id):
        webbrowser.open(f'amazon-games://{command}/{game_id}')

    #
    # Galaxy Plugin methods
    #

    async def authenticate(self, stored_credentials=None):
        self.logger.info("Plugin authenticate")

        if not stored_credentials:
            return create_next_step(START_URI.SPLASH, END_URI.SPLASH_CONTINUE)

        return self._on_auth()

    async def pass_login_credentials(self, step, credentials, cookies):
        if ('splash_continue' in credentials['end_uri']
                or 'missing_app_retry' in credentials['end_uri']):
            if not self._client.is_installed:
                return create_next_step(START_URI.MISSING_APP,
                                        END_URI.MISSING_APP_RETRY)

            return self._on_auth()

        return create_next_step(START_URI.SPLASH, END_URI.SPLASH_CONTINUE)

    async def get_owned_games(self):
        return list(self._owned_games_cache.values())

    async def get_local_games(self):
        return list(self._local_games_cache.values())

    def handshake_complete(self):
        self._client.update_install_location()
        self._init_db()
        if self._client.is_installed:
            self._update_local_games_state()
            self._update_owned_games_cache()

    def tick(self):
        self._client.update_install_location()
        self._init_db()
        if self._client.is_installed:
            self._update_local_games_state()
            self._update_owned_games_cache()

    async def launch_game(self, game_id):
        AmazonGamesPlugin._scheme_command('play', game_id)

    async def install_game(self, game_id):
        AmazonGamesPlugin._scheme_command('play', game_id)

    async def uninstall_game(self, game_id):
        self.logger.info(f'Uninstalling game {game_id}')
        self._client.uninstall_game(game_id)

    async def launch_platform_client(self):
        self._client.start_client()

    async def shutdown_platform_client(self):
        self._client.stop_client()

    async def get_os_compatibility(self, game_id, context):
        return OSCompatibility.Windows
Example #18
def _scenario_activity_endorsement(dbclient: DBClient):
    timestamp = datetime.now().astimezone()
    dbclient.insert_event('1', timestamp - timedelta(hours=10), 'steps',
                          '10000')
    dbclient.insert_event('1', timestamp - timedelta(hours=8), 'mind',
                          'Quality of Sleep')
    dbclient.insert_event('1', timestamp - timedelta(hours=7), 'water',
                          '2 cups')
    dbclient.insert_event('1', timestamp - timedelta(hours=6), 'mind',
                          'Energy level')
    dbclient.insert_event('1', timestamp - timedelta(hours=4), 'medication',
                          'Done')
    dbclient.insert_event('1', timestamp - timedelta(hours=2), 'PRO',
                          '5 - Low')
Example #19
def _scenario_inactivity(dbclient: DBClient):
    timestamp = datetime.now().astimezone()
    dbclient.insert_event('1', timestamp - timedelta(hours=25), 'steps',
                          '10000')
Example #20
def _scenario_missing_medication(dbclient: DBClient):
    timestamp = datetime.now().astimezone()
    dbclient.insert_event('1', timestamp - timedelta(days=3), 'medication',
                          'done')
    dbclient.insert_event('1', timestamp - timedelta(hours=2), 'water',
                          '3 cups')
Example #21

def get_receiver_user_id_str(tweet, receiver_screen_name):
    for mention in tweet["entities"]["user_mentions"]:
        if mention["screen_name"] == receiver_screen_name:
            # return the string form of the id, matching the function's name
            return mention["id_str"]

    raise Exception("receiver user not found in mentions")
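
A quick usage sketch with a minimal payload shaped like the Twitter API's `entities.user_mentions` structure:

tweet = {
    "entities": {
        "user_mentions": [
            {"screen_name": "alice", "id": 12345, "id_str": "12345"},
        ]
    }
}
assert get_receiver_user_id_str(tweet, "alice") == "12345"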


if __name__ == '__main__':
    config = toml.load(open(config_path))

    t_client = TwitterClient(config["twitter"])
    w_client = WalletClient(config["wallet"])
    d_client = DBClient(config["database"])

    print("Worker Run")

    timeline = t_client.stream_bot_timeline()
    for line in timeline.iter_lines():
        try:
            tweet = json.loads(line.decode("utf-8"))

            tweet_id_str = tweet["id_str"]  # used later when replying
            sender_user_id_str = tweet["user"]["id_str"]
            sender_user_screen_name = tweet["user"]["screen_name"]
            tweet_dict = tweet["text"].split(" ")

            # check that the tweet mentions the bot
            if tweet_dict[0] != t_client.bot_name:
Example #22
class DBInit():
    """Clear and initialize the evictions database."""
    def __init__(self):
        self.db = DBClient()

    def evictions_init(self, level):
        """Clear and initialize the evictions table(s)."""

        logger.info("Dropping table {}...".format(level))
        self.db.write([db_statements.DROP_TABLE_EV_LAB.format(level)])
        logger.info("Creating table {}...".format(level))
        self.db.write([db_statements.CREATE_TABLE_EV_LAB.format(level)])
        logger.info("Copying table...")
        self.db.copy('data/raw/{}.csv'.format(level),
                     db_statements.COPY_CSV_EVICTIONS.format(level))
        logger.info("Records committed.")
        logger.info("Creating indexes...")
        self.db.write([
            db_statements.IDX_STATE_YEAR.format(level, level),
            db_statements.IDX_YEAR.format(level, level),
            db_statements.IDX_STATE.format(level, level),
            db_statements.IDX_EVICTIONS.format(level, level),
            db_statements.IDX_GEOID.format(level, level),
            db_statements.IDX_GEOID_YEAR.format(level, level),
        ])
        logger.info("Indexes created.")
        logger.info("Adding sub-geography columns...")
        self.db.write([
            db_statements.CREATE_VAR_STATE.format(level),
            db_statements.CREATE_VAR_COUNTY.format(level),
            db_statements.UPDATE_VAR_STATE.format(level),
            db_statements.UPDATE_VAR_COUNTY.format(level)
        ])
        if level == "blockgroup":
            self.db.write([
                db_statements.CREATE_VAR_TRACT, db_statements.UPDATE_VAR_TRACT
            ])
        logger.info("Sub-geography columns added...")
        logger.info("Dropping records outside SA/ENC Divisions...")
        self.db.write([db_statements.DROP_STATE.format(level)])
        logger.info("{} table completed.".format(level))

    def geo_init(self):
        """Clear and initialize Postgis."""

        self.db.write([
            db_statements.CREATE_EXT_POSTGIS, db_statements.CREATE_EXT_FUZZY,
            db_statements.CREATE_EXT_TIGER,
            db_statements.CREATE_EXT_POSTGIS_TOP, db_statements.DROP_F_EXEC,
            db_statements.CREATE_F_EXEC,
            db_statements.ALTER_SPATIAL_REF_SYS.format(self.db.DB_USER)
        ])

    def census_shp(self, geography):
        """Read shapes for a given geography."""
        DROP_TABLE_SHP = db_statements.DROP_TABLE_SHP.format(geography)
        self.db.write([DROP_TABLE_SHP])

        shp_read = "shp2pgsql -s 4269:4326 -W 'latin1' data/tl_2010_us_{}10/tl_2010_us_{}10.shp evictions.census_{}_shp | psql {} -U {} -W {} -p {} -h {}".format(
            geography, geography, geography, 'evictions', self.db.DB_USER,
            self.db.DB_PASSWORD, self.db.DB_PORT, self.db.DB_HOST)
        os.system(shp_read)

    def create_n_year_average(self, source_table, source_col, target_table,
                              lag):
        """Create n-year (lag) average for given attribute, source table, and target table"""

        logger.info("Adding {} year average to {} for feature {}".format(
            lag, target_table, source_col))
        target_col = '{}_avg_{}yr'.format(source_col, lag)
        DROP_COLUMN = db_statements.DROP_COLUMN.format(target_table,
                                                       target_col)
        ADD_COLUMN = db_statements.ADD_COLUMN.format(target_table, target_col,
                                                     "FLOAT4")
        INSERT_N_YEAR_AVG = db_statements.INSERT_N_YEAR_AVG.format(
            target_table, target_col, source_col, source_table, source_table,
            lag)

        logger.info("Running:")
        logger.debug(INSERT_N_YEAR_AVG)
        try:
            self.db.write([DROP_COLUMN, ADD_COLUMN, INSERT_N_YEAR_AVG])
        except Exception as e:
            logger.error(e)
            return False

        logger.info("Added {} year average to {} for feature {}".format(
            lag, target_table, source_col))
        return True

    def create_n_year_pct_change(self, source_table, source_col, target_table,
                                 lag):
        """Create n-year (lag) percentage change for given attribute, source table, and target table"""

        logger.info("Adding {} year pct change to {} for feature {}".format(
            lag, target_table, source_col))
        target_col = '{}_pct_change_{}yr'.format(source_col, lag)
        DROP_COLUMN = db_statements.DROP_COLUMN.format(target_table,
                                                       target_col)
        ADD_COLUMN = db_statements.ADD_COLUMN.format(target_table, target_col,
                                                     "FLOAT")
        INSERT_N_YEAR_PCT_CHANGE = db_statements.INSERT_N_YEAR_PCT_CHANGE \
            .format(target_table, target_col,
                    source_col, source_col,
                    source_col, source_col,
                    source_col, source_col,
                    source_col, source_col, source_col,
                    source_table, source_table,
                    lag)

        logger.info("Running:")
        logger.debug(INSERT_N_YEAR_PCT_CHANGE)
        try:
            self.db.write([DROP_COLUMN, ADD_COLUMN, INSERT_N_YEAR_PCT_CHANGE])
        except Exception as e:
            logger.error(e)
            return False

        logger.info("Added {} year pct change to {} for feature {}".format(
            lag, target_table, source_col))
        return True

    def create_geo_features_table(self):
        """Create geographic features table"""

        self.db.write(
            [db_statements.DROP_TABLE_URBAN, db_statements.CREATE_TABLE_URBAN])

        logger.info("Create urban table")

        df = pd.read_csv('data/raw/Urban_County_2010.csv', header=0)
        df = df[['UA', 'STATE', 'COUNTY', 'GEOID']]
        df.to_csv('data/raw/Urban_County_2010_sub.csv', index=False)

        self.db.copy('data/raw/Urban_County_2010_sub.csv',
                     db_statements.COPY_CSV_URBAN)

        logger.info("Creating geo table...")
        self.db.write([
            db_statements.DROP_TABLE_GEOGRAPHIC,
            db_statements.CREATE_TABLE_GEOGRAPHIC
        ])

        logger.info("Updating geo table...")
        self.db.write([db_statements.ALTER_TABLE_GEOGRAPHIC])

        logger.info("Geo table created.")

        logger.info("Creating indexes...")
        self.db.write(
            [db_statements.IDX_COUNTY_GEO, db_statements.IDX_STATE_GEO])

        logger.info("Updating table...")

        self.db.write([
            db_statements.UPDATE_VAR_DIV_NE, db_statements.UPDATE_VAR_DIV_MA,
            db_statements.UPDATE_VAR_DIV_ENC, db_statements.UPDATE_VAR_DIV_WNC,
            db_statements.UPDATE_VAR_DIV_SA, db_statements.UPDATE_VAR_DIV_ESC,
            db_statements.UPDATE_VAR_DIV_WSC, db_statements.UPDATE_VAR_DIV_MNT,
            db_statements.UPDATE_VAR_DIV_PAC
        ])

        logger.info("updating urban")
        self.db.write([db_statements.UPDATE_VAR_URBAN])

        logger.info("Regional dummies and urban updated.")

    def create_outcome_table(self, start_year, end_year):
        """Create outcomes table with different outcomes measures"""

        DROP_TABLE_OUTCOME = db_statements.DROP_TABLE_OUTCOME
        CREATE_TABLE_OUTCOME = db_statements.CREATE_TABLE_OUTCOME

        write_list = []

        for year in range(start_year, end_year):
            INSERT_OUTCOMES = db_statements.INSERT_OUTCOMES.format(year, year)
            write_list.append(INSERT_OUTCOMES)

        logger.debug(write_list)
        try:
            self.db.write(write_list)
        except Exception as e:
            logger.error(e)
            return False

        return True

    def update_outcome_change_cat(self,
                                  col_name,
                                  col_type,
                                  existing_col,
                                  zero_to_one=True):
        """Update outcomes table to add category change variables"""

        DROP_COLUMN = db_statements.DROP_COLUMN.format('outcome', col_name)
        ADD_COLUMN = db_statements.ADD_COLUMN.format('outcome', col_name,
                                                     col_type)

        if zero_to_one:
            OUTCOME_CAT_CHANGE = db_statements.OUTCOME_CAT_CHANGE_0_1.format(
                col_name, existing_col, existing_col)
        else:
            OUTCOME_CAT_CHANGE = db_statements.OUTCOME_CAT_CHANGE_1_0.format(
                col_name, existing_col, existing_col)

        logger.debug(OUTCOME_CAT_CHANGE)
        try:
            self.db.write([DROP_COLUMN, ADD_COLUMN, OUTCOME_CAT_CHANGE])
        except Exception as e:
            logger.error(e)
            return False

        return True

    # TODO RENAME
    def create_ntile_discretization(self,
                                    source_col,
                                    target_table,
                                    col_type,
                                    num_buckets=4):
        """discretize a given column into a given number of buckets"""
        target_col = '{}_{}tiles'.format(source_col, num_buckets)
        DROP_COLUMN = db_statements.DROP_COLUMN.format(target_table,
                                                       target_col)
        ADD_COLUMN = db_statements.ADD_COLUMN.format(target_table, target_col,
                                                     col_type)
        INSERT_NTILE_DISCRETIZATION = db_statements.INSERT_NTILE_DISCRETIZATION.format(
            target_table, target_col, num_buckets, source_col, target_col)

        logger.debug(INSERT_NTILE_DISCRETIZATION)
        try:
            self.db.write(
                [DROP_COLUMN, ADD_COLUMN, INSERT_NTILE_DISCRETIZATION])
        except Exception as e:
            logger.error(e)
            return False

        return True

    def permit_import(self):
        """Create permits table and import permit data from csv"""

        self.db.write([
            db_statements.DROP_TABLE_PERMITS,
            db_statements.CREATE_TABLE_PERMITS
        ])
        self.db.copy('data/raw/permits.csv', db_statements.COPY_CSV_PERMITS)

        return True

    def hhsize_import(self):
        """Create household size table and import household size data from csv"""

        self.db.write([
            db_statements.DROP_TABLE_HHSIZE, db_statements.CREATE_TABLE_HHSIZE
        ])
        self.db.copy('data/raw/hs_final.csv', db_statements.COPY_CSV_HHSIZE)
        self.db.write([
            db_statements.CREATE_VAR_HHSIZE, db_statements.UPDATE_VAR_HHSIZE,
            db_statements.DROP_TABLE_HHSIZE
        ])
        return True

    def ev_lag_tr(self):
        """Create lagged outcome features (one year lag) for tract table"""
        self.db.write(["drop table if exists ev_lag_tr;"])
        self.db.write([db_statements.CREATE_EV_TABLE.format("tr", "tr")])
        self.db.write(["create index lag_gy on ev_lag_tr (geo_id, year);"])
        self.db.write(["create index lag_y on ev_lag_tr (year);"])
        self.db.write([db_statements.UPDATE_COLS_LAG_TR])
        self.db.write(["drop table ev_lag_tr;"])

    def ev_lag_bg(self):
        """Create lagged outcome features (one year lag) for blockgroup table"""
        self.db.write(["drop table if exists ev_lag_blockgroup;"])
        self.db.write(
            [db_statements.CREATE_EV_TABLE.format("blockgroup", "blockgroup")])
        self.db.write(
            ["create index lag_gy_bg on ev_lag_blockgroup (geo_id, year);"])
        self.db.write(["create index lag_y_bg on ev_lag_blockgroup (year);"])
        self.db.write([db_statements.UPDATE_COLS_LAG_BG])
        self.db.write(["drop table ev_lag_blockgroup;"])

    def rem_9_ev(self, lag, col, tr):
        """Remove 999999 outlier values from evictions pct change columns, replace with max for given year. These values were created
           for cases in which a feature value moved from 0 to a non-zero value (an infinite percent change). We replace these
           values with the maximum percent change for the given feature in that year."""
        logger.info(
            "removing 999999 from lagged {} for {} yr pct change where tract is {}"
            .format(col, lag, tr))
        try:
            self.db.write([
                db_statements.REM_999999_ev.format(col, lag, tr, col, lag, tr,
                                                   col, lag, tr, col, lag, tr)
            ])
        except Exception as e:
            logger.error(e)
            return False
        logger.info(
            "removed 999999 from lagged {} for {} yr pct change".format(
                col, lag))
        return True

    def rem_9(self, lag, col, tr):
        """Remove 999999 outlier values from pct change columns, replace with max for given year. These values were created
           for cases in which a feature value moved from 0 to a non-zero value (an infinite percent change). We replace these
           values with the maximum percent change for the given feature in that year."""
        logger.info(
            "removing 999999 from {} for {} yr pct change where tract is {}".
            format(col, lag, tr))
        try:
            self.db.write([
                db_statements.REM_999999.format(col, lag, tr, col, lag, tr,
                                                col, lag, tr, col, lag, tr)
            ])
        except Exception as e:
            logger.error(e)
            return False
        logger.info("removed 999999 from {} for {} yr pct change".format(
            col, lag))
        return True
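
A hedged end-to-end sketch of how `DBInit` appears intended to be driven, with the call order inferred from the method docstrings (arguments are illustrative):

init = DBInit()
init.geo_init()                        # install Postgis and related extensions
init.evictions_init('blockgroup')      # clear and reload the evictions table
init.census_shp('county')              # import census shapefiles via shp2pgsql
init.create_geo_features_table()       # urban flag and regional dummies
init.create_outcome_table(2000, 2016)  # outcome measures per year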
Example #23
#!/home/jjardel/fb/pkgs/envs/etl/bin/python

from api_client import APIClient
from db_client import DBClient
from pandas.io.json import json_normalize



client = APIClient()
data = client.get_data('nfl-teams')

df = json_normalize(data['NFLTeams'])


# import this df directly to PG DB
conn = DBClient()
conn.load(df, 'teams', schema='raw', if_exists='replace')

Example #24
def _clear_all(dbclient: DBClient, args):
    dbclient.clear_all()
Example #25
def _clear_events(dbclient: DBClient, args):
    dbclient.clear_events()
Example #26
def _get_rules(dbclient: DBClient, args):
    rows = dbclient.get_rules()

    print(header_rules)
    for rule in rows:
        print(rule2str(rule))
Example #27
class Treets(object):
    """TODO docstring for Treets"""
    def __init__(self):
        super(Treets, self).__init__()
        self.db_client = DBClient()
        self.data_converter = DataConverter()

    def all_tweets(self, limit=TWEETS_LIMIT):
        '''
        TODO docstring
        '''
        self.result = self.db_client.get_tweets(limit)
        return self.tweets_to_geojson(self.result)

    def all_traces(self, limit=TRACES_LIMIT):
        '''
        TODO docstring
        '''
        self.result = self.db_client.get_traces(limit)
        return self.traces_to_geojsons(self.result)

    def prepare_template_args(self, template_args, traces, tweets):
        template_args['shown_tweets'] = len(tweets['features'])
        template_args['shown_traces'] = len(traces['features'])
        template_args['tweets_geojson'] = str(tweets)
        template_args['traces_geojson'] = str(traces)

    def search_tweets_near_point(self, coords, dist):
        '''
        TODO docstring
        '''
        self.result = self.db_client.get_tweets_near_point(coords, dist)
        return self.tweets_to_geojson(self.result)

    def search_tweets_near_point_and_text(self, coords, text, dist):
        '''
        TODO docstring
        '''
        self.result = self.db_client.get_tweets_near_point_and_text(
            coords, dist, text)
        return self.tweets_to_geojson(self.result)

    def search_tweets_text(self, text):
        '''
        TODO docstring
        '''
        self.result = self.db_client.get_tweets_for_text(text)
        return self.tweets_to_geojson(self.result)

    def search_user_trace(self, text):
        '''
        TODO docstring
        '''
        self.result = self.db_client.get_trace_for_user(text)
        return self.traces_to_geojsons(self.result)

    def search_traces_near_point(self, coords, dist, limit):
        '''
        TODO docstring
        '''
        self.result = self.db_client.get_traces_near_point(coords, dist, limit)
        return self.traces_to_geojsons(self.result)

    def search_traces_near_point_and_text(self, coords, dist, text, limit):
        '''
        TODO docstring
        '''
        self.result = self.db_client.get_traces_near_point_and_text(
            coords, dist, text, limit)
        return self.traces_to_geojsons(self.result)

    def search_traces_text(self, text, limit):
        '''
        TODO docstring
        '''
        self.result = self.db_client.get_traces_for_text(text, limit)
        return self.traces_to_geojsons(self.result)

    def export_trace(self, user_name):
        '''
        TODO docstring
        '''
        self.result = self.db_client.get_tweets_for_user_str(user_name)[0]
        tweets_df = self.data_converter.tweets_to_table(self.result)
        fname = user_name + '_' + time.strftime("%Y%m%d-%H%M%S") + '.csv'
        tweets_df.to_csv(TMP_FOLDER + fname,
                         index=False,
                         sep=';',
                         encoding='utf-8',
                         decimal=',')
        return fname

    def tweets_to_geojson(self, result):
        '''
        TODO docstring
        '''
        return self.data_converter.tweets_to_feature_collection(result)

    def traces_to_geojsons(self, result):
        '''
        Returns a geojson containing all traces and a geojson containing all tweets
        '''
        return self.data_converter.traces_to_feature_collection(result)
Example #28
def _clear_alerts(dbclient: DBClient, args):
    dbclient.clear_alerts()
Example #29
    def __init__(self):
        super(Treets, self).__init__()
        self.db_client = DBClient()
        self.data_converter = DataConverter()
Example #30
import os
import sys

from spotify_api_client import SpotifyClientAPI
import spotify_api_processor
from db_client import DBClient

client_id = os.getenv('spotify_client_id')
client_secret = os.getenv('spotify_client_secret')
DATABASE_URL = os.getenv('DATABASE_URL')

playlist_uri = sys.argv[1]
access_token = sys.argv[2]

api_client = SpotifyClientAPI(client_id=client_id,
                              client_secret=client_secret,
                              access_token=access_token)

db_client = DBClient(DATABASE_URL=DATABASE_URL)

playlist_id = spotify_api_processor.get_playlist_id(playlist_uri=playlist_uri)
playlist_name = api_client.get_playlist_info(playlist_id=playlist_id,
                                             fields='name')['name']
playlist_tracks = api_client.get_playlist_tracks(playlist_id=playlist_id)

db_client.create_table()

for item in playlist_tracks['items']:
    track_id = item['track']['id']
    track_uri = item['track']['uri']
    track_features = api_client.get_track_features(track_id=track_id,
                                                   track_uri=track_uri)
    if track_features == {}:
        continue
Example #31
# -*- encoding: utf-8 -*-

from db_client import DBClient
from lxml import etree
import json
import codecs

d = DBClient()


class HTMLBuilder(object):
    """
    build htmls which include forms - search, add, edit + view mode
    saves hierarchial structure of data using embedded divs
    """
    def __init__(self):
        info = json.loads(
            codecs.open("html_info.json", 'r', encoding='utf-8').read())
        self.fields = info['field_types']
        self.blocks = info['blocks']
        self.subblocks = info['subblocks']
        self.order = info['order']
        self.labels = info['labels']
        self.choices = info['choices']
        self.from_db = info['choices_from_db']
        self.buttons = info['buttons']
        self.parser = etree.HTMLParser()

    def create_html(self,
                    values,
                    mode,
Example #32
import sys

import init_env
import settings
import utils
from db_client import Advertiser, DBClient

if __name__ == "__main__":
    try:
        assert len(sys.argv) == 4
    except AssertionError:
        print("usage : %s [username] [password] [email]" % sys.argv[0])
        exit(-1)

    name = sys.argv[1]
    pwd = utils.gen_secret(sys.argv[2])
    email = sys.argv[3]

    db = DBClient(settings.db_user, settings.db_password,
        settings.db_host, settings.db_name)
    l = len(db.select_all(Advertiser))

    user = Advertiser(name=name, password=pwd, email=email)
    db.do_save(user)
    assert len(db.select_all(Advertiser)) == l + 1
Example #33
class AmazonGamesPlugin(Plugin):
    _owned_games_db = None
    _local_games_db = None
    _owned_games_last_updated = 0
    _local_games_last_updated = 0

    def __init__(self, reader, writer, token):
        super().__init__(Platform.Amazon, __version__, reader, writer, token)
        self.logger = logging.getLogger('amazonPlugin')
        self._client = AmazonGamesClient()

        self._local_games_cache = None
        self._owned_games_cache = None

    def _init_db(self):
        if not self._owned_games_db:
            self._owned_games_db = DBClient(self._client.owned_games_db_path)

        if not self._local_games_db:
            self._local_games_db = DBClient(
                self._client.installed_games_db_path)

    def _on_auth(self):
        self.logger.info("Auth finished")
        self._init_db()

        self.store_credentials({'creds': 'dummy_data_because_local_app'})
        return Authentication('amazon_user_id', 'Amazon Games User')

    def _get_owned_games(self):
        try:
            return {
                row['ProductIdStr']:
                Game(row['ProductIdStr'],
                     row['ProductTitle'],
                     dlcs=None,
                     license_info=LicenseInfo(LicenseType.SinglePurchase))
                for row in self._owned_games_db.select(
                    'DbSet', rows=['ProductIdStr', 'ProductTitle'])
            }
        except Exception:
            self.logger.exception('Failed to get owned games')
            return {}

    def _update_owned_games(self):
        if (time() - self._owned_games_last_updated) < OWNED_GAMES_TIMEOUT:
            return

        owned_games = self._get_owned_games()

        for game_id in self._owned_games_cache.keys() - owned_games.keys():
            self.remove_game(game_id)

        for game_id in (owned_games.keys() - self._owned_games_cache.keys()):
            self.add_game(owned_games[game_id])

        self._owned_games_cache = owned_games
        self._owned_games_last_updated = time()

    def _get_local_games(self):
        try:
            return {
                row['Id']: LocalGame(row['Id'], LocalGameState.Installed)
                for row in self._local_games_db.select(
                    'DbSet', rows=['Id', 'Installed']) if row['Installed']
            }
        except Exception:
            self.logger.exception('Failed to get local games')
            return {}

    def _update_local_games(self):
        if (time() - self._local_games_last_updated) < LOCAL_GAMES_TIMEOUT:
            return

        local_games = self._get_local_games()

        for game_id in self._local_games_cache.keys() - local_games.keys():
            self.update_local_game_status(
                LocalGame(game_id, LocalGameState.None_))

        for game_id, local_game in local_games.items():
            old_game = self._local_games_cache.get(game_id)
            if old_game is None or old_game.local_game_state != local_game.local_game_state:
                self.update_local_game_status(local_game)

        self._local_games_cache = local_games
        self._local_games_last_updated = time()

    @staticmethod
    def _scheme_command(command, game_id):
        webbrowser.open(f'amazon-games://{command}/{game_id}')

    async def _ensure_initialization(self):
        await asyncio.sleep(FALLBACK_SYNC_TIMEOUT)

        if not self._client.is_installed:
            return

        if not self._local_games_cache:
            self.logger.info('Fallback initialization of `_local_games_cache`')
            self._local_games_cache = {}

        if not self._owned_games_cache:
            self.logger.info('Fallback initialization of `_owned_games_cache`')
            self._owned_games_cache = {}

    #
    # Galaxy Plugin methods
    #

    async def authenticate(self, stored_credentials=None):
        self.logger.info("Plugin authenticate")

        if not stored_credentials:
            return create_next_step(START_URI.SPLASH, END_URI.SPLASH_CONTINUE)

        return self._on_auth()

    async def pass_login_credentials(self, step, credentials, cookies):
        if any(x in credentials['end_uri']
               for x in ['splash_continue', 'missing_app_retry']):
            if not self._client.is_installed:
                return create_next_step(START_URI.MISSING_APP,
                                        END_URI.MISSING_APP_RETRY)

            return self._on_auth()

        return create_next_step(START_URI.SPLASH, END_URI.SPLASH_CONTINUE)

    async def get_owned_games(self):
        if self._owned_games_cache is None:
            self._owned_games_last_updated = time()
            self._owned_games_cache = self._get_owned_games()
        return list(self._owned_games_cache.values())

    async def get_local_games(self):
        if self._local_games_cache is None:
            self._local_games_last_updated = time()
            self._local_games_cache = self._get_local_games()
        return list(self._local_games_cache.values())

    def handshake_complete(self) -> None:
        self.create_task(self._ensure_initialization(),
                         '_ensure_initialization')

    def tick(self):
        self._client.update_install_location()
        if self._client.is_installed:
            if self._owned_games_db and self._owned_games_cache is not None:
                self._update_owned_games()

            if self._local_games_db and self._local_games_cache is not None:
                self._update_local_games()

    async def launch_game(self, game_id):
        AmazonGamesPlugin._scheme_command('play', game_id)

    async def install_game(self, game_id):
        # FIXME Opens launcher and an install dialog, but no action
        AmazonGamesPlugin._scheme_command('play', game_id)

    async def uninstall_game(self, game_id):
        self.logger.info(f'Uninstalling game {game_id}')
        self._client.uninstall_game(game_id)

    async def launch_platform_client(self):
        self._client.start_client()

    async def shutdown_platform_client(self):
        self._client.stop_client()

    async def get_os_compatibility(self, game_id, context):
        return OSCompatibility.Windows
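
Finally, the class above references module-level constants the snippet does not define (`OWNED_GAMES_TIMEOUT`, `LOCAL_GAMES_TIMEOUT`, `FALLBACK_SYNC_TIMEOUT`); placeholder values in seconds, purely as an assumption:

OWNED_GAMES_TIMEOUT = 300    # assumed: re-check owned games at most every 5 minutes
LOCAL_GAMES_TIMEOUT = 60     # assumed: re-check installed games every minute
FALLBACK_SYNC_TIMEOUT = 30   # assumed: grace period before fallback cache initialization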