def create_roles_table(conn, roles, table_name='roles'):
    """Create the roles table and load *roles* into it.

    Runs inside a single transaction on *conn*; 'id' becomes the
    primary key.
    """
    constraints = {'id': 'primary key'}
    with conn:
        schema = tables.generate_table_schema(
            roles, column_constraints=constraints)
        tables.create_table(conn, schema, table_name)
        tables.populate_table(conn, schema, table_name, roles)
def create_players_table(conn, players, table_name='players'):
    """Create the players table and load *players* into it.

    'id' is the primary key; 'team' and 'element_type' are foreign
    keys into teams and roles respectively. Runs in one transaction.
    """
    constraints = {
        'id': 'primary key',
        'team': 'references teams(id)',
        'element_type': 'references roles(id)',
    }
    with conn:
        schema = tables.generate_table_schema(
            players, column_constraints=constraints)
        tables.create_table(conn, schema, table_name)
        tables.populate_table(conn, schema, table_name, players)
def create_teams_table(conn, teams, table_name='teams'):
    """Create the teams table and load *teams* into it.

    Nested/derived fields that do not map to flat columns are
    excluded from both the schema and the inserted rows.
    """
    skipped = {'current_event_fixture', 'next_event_fixture', 'form'}
    constraints = {'id': 'primary key'}
    with conn:
        schema = tables.generate_table_schema(
            teams,
            exclude_columns=skipped,
            column_constraints=constraints)
        tables.create_table(conn, schema, table_name)
        tables.populate_table(conn, schema, table_name, teams,
                              exclude_columns=skipped)
def search_crypto():
    """Interactively look up a cryptocurrency by symbol and print its details.

    Shows the known symbols, prompts for one, renders the search result
    table plus description/website/source links, and repeats until the
    user declines — then hands control back to main().
    """
    while True:
        os.system("cls")
        check_crypto_dict()
        # Show every known symbol on one line so the user can pick one.
        for el in crypto_dict:
            print(el, end=" ")
        print("")
        symbol = input("Enter symbol of crypto currency: ").upper()
        # .get() instead of indexing: an unknown symbol should re-prompt,
        # not crash with KeyError.
        url_id = crypto_dict.get(symbol)
        if url_id is None:
            input("Unknown symbol. Press Enter to try again...")
            continue
        data, info = api.get_info(url_id, option="search")
        table = tables.create_table(data, "search")
        os.system("cls")
        print(table)
        print("Description: " + info["description"])
        # Plain if/else instead of the original side-effecting
        # conditional expressions.
        if info["urls"]["website"]:
            print("Website: " + info["urls"]["website"][0])
        else:
            print("Website: /")
        if info["urls"]["source_code"]:
            print("Source code: " + info["urls"]["source_code"][0])
        else:
            # Fixed typo and casing: was "Souce Code: /".
            print("Source code: /")
        print("")
        choice = input("Do you want to search again? (y/n) ").lower()
        if choice not in ("y", "yes"):
            break
    main()
def top_list():
    """Fetch the current market listings, display them, then return to main()."""
    listings = api.get_listings()
    rendered = tables.create_table(listings, "top_list")
    print(rendered)
    input("Press any key to return to Main Menu...")
    main()
def create_player_match_details_table(conn, element_summaries,
                                      table_name='player_match_details'):
    """Create the per-player match-details table and load every summary.

    The schema is derived from the first summary; each summary is then
    inserted in turn inside one transaction.
    """
    constraints = {
        'id': 'primary key',
        'fixture': 'references fixtures(id)',
        'element': 'references players(id)',
        'round': 'references events(id)',
        'opponent_team': 'references teams(id)',
    }
    # Use the first summary as the template for the column layout.
    template = element_summaries[0]
    with conn:
        schema = tables.generate_table_schema(
            template, column_constraints=constraints)
        tables.create_table(conn, schema, table_name)
        for summary in element_summaries:
            tables.populate_table(conn, schema, table_name, summary)
def create_fixtures_table(conn, fixtures, table_name='fixtures'):
    """Create the fixtures table and load *fixtures* into it.

    The nested 'stats' field is excluded; team and event columns are
    foreign keys. Runs in one transaction.
    """
    skipped = {'stats'}
    constraints = {
        'id': 'primary key',
        'team_a': 'references teams(id)',
        'team_h': 'references teams(id)',
        'event': 'references events(id)',
    }
    with conn:
        schema = tables.generate_table_schema(
            fixtures,
            exclude_columns=skipped,
            column_constraints=constraints)
        tables.create_table(conn, schema, table_name)
        tables.populate_table(conn, schema, table_name, fixtures,
                              exclude_columns=skipped)
def create_roles_table(conn, roles, table_name='roles'):
    """Create the roles table and load *roles* into it.

    Fields that do not map to flat columns are excluded from both the
    schema and the inserted rows; 'id' is the primary key.
    """
    skipped = {
        "chip_plays",
        "top_element_info",
        "sub_positions_locked",
        "team_division",
    }
    constraints = {'id': 'primary key'}
    with conn:
        schema = tables.generate_table_schema(
            roles,
            exclude_columns=skipped,
            column_constraints=constraints)
        tables.create_table(conn, schema, table_name)
        tables.populate_table(conn, schema, table_name, roles,
                              exclude_columns=skipped)
def GET(self):
    """Handle an HTTP GET by rendering the table for the working directory."""
    # Delegate all rendering to the shared _tables helper; presumably
    # returns the response body — confirm against the framework's contract.
    response = _tables.create_table(working_directory)
    return response
high = tb.Float32Col() #IGNORE:E1101 low = tb.Float32Col() #IGNORE:E1101 close = tb.Float32Col() #IGNORE:E1101 vol = tb.UInt64Col() #IGNORE:E1101 amount = tb.UInt64Col() #IGNORE:E1101 def save_data(self, data): def fmtdatestr(datetime): return int(datetime.replace('-','').replace(':','')) row = self.row for i, record in data.iterrows(): row['datetime'] = fmtdatestr(record['date']) row['open'] = record['open'] row['high'] = record['high'] row['low'] = record['low'] row['close'] = record['close'] row['vol'] = record['vol'] row['amount'] = record['amount'] row.append() self.flush() if __name__ == '__main__': cl=util_hdf5_setting() #aa=tdx_f.fetch_get_stock_day('000001', '2013-07-01', '2013-07-09')[['date','open','high','low','close','vol','amount']] #cl.create_table('SH600000') #cl.save_data(aa) tb=cl['SZM'] tb.create_table('SZ000402') print(tb[1:10]) print(tb.query('datetime>20180901'))
        friend_count INT,
        location VARCHAR(255)
        );"""
}

#connect to the MySQL database
cnx = mysql_schema.mysql_connect(user, password) #user and password
cursor = cnx.cursor()

#create a database and select it.
test_db = 'Tester'
mysql_db.create_database(cursor, test_db)
mysql_db.use_database(cursor, test_db)

#create a table and perform CRUD statements
tables.create_table(cursor, twitter_tables)

#inserts - single and bulk
insert_s = f"INSERT INTO {t_name} VALUES(1,'egg','{DT.now().strftime('%Y-%m-%d %H:%M:%S')}','hello', False, 4, 4, 'Land down under')"
# NOTE(review): bulk_insert concatenates several semicolon-separated
# statements into one string — presumably the helper executes with
# multi-statement support; confirm in bulk_insert_into_table.
bulk_insert = (
    f"INSERT INTO {t_name} VALUES(2,'hatchling','{DT.now().strftime('%Y-%m-%d %H:%M:%S')}','hello',True,6,9,'Wales');"
    f"INSERT INTO {t_name} VALUES(3, 'bird','{DT.now().strftime('%Y-%m-%d %H:%M:%S')}',"
    f"'hello', False, 10000, 9, 'Space');"
    f"INSERT INTO {t_name} VALUES(4, 'velo','{DT.now().strftime('%Y-%m-%d %H:%M:%S')}',"
    f"'Hi', True, 10000, 100, 'SoHo');")
tables.insert_into_table(cursor, t_name, insert_s, cnx)
tables.bulk_insert_into_table(cursor, bulk_insert, cnx)

#queries
query = f"SELECT * FROM {t_name}"
def parse_value(data):
    """Insert one tweet's rows (tweet, mentions, hashtags, urls) into the DB."""
    # The tweet row is created first so the dependent rows can reference it.
    new_tweet = create_new_tweet(data)
    create_new_mention(data, new_tweet)
    create_new_hashtags(data, new_tweet)
    create_new_urls(data, new_tweet)


def parse_each_file(onlyjson):
    """Insert the content of each file in *onlyjson* into the DB."""
    total_file = len(onlyjson)
    # enumerate() replaces onlyjson.index(i): the original was an O(n)
    # scan per iteration and reported the wrong position on duplicates.
    for i, json_file in enumerate(onlyjson):
        print(str(i) + ' / ' + str(total_file))
        data = get_the_json_value(json_file)
        parse_value(data)


if __name__ == '__main__':
    # TODO: add comments for the table layout.
    create_table()
    json_files = get_all_the_json_files()
    parse_each_file(json_files)