def handle_aurora_crud(event, context):
    request_body = event['body']
    event_type = request_body['eventType']
    databaseName = os.environ['DatabaseName']
    awsSecretStoreArn = os.environ['AwsSecretStoreArn']
    dbClusterOrInstanceArn = os.environ['DbClusterArn']

    if event_type == "createTable":
        sql.create_table(databaseName, awsSecretStoreArn, dbClusterOrInstanceArn)
        return {"success": True, "message": "Created Table"}

    if event_type == "getRecords":
        records = sql.get_records(databaseName, awsSecretStoreArn, dbClusterOrInstanceArn)
        return {"success": True, "records": records}

    if event_type == "saveRecord":
        record_info = request_body['recordInfo']
        record_info['record_id'] = randint(1, 10000000)
        sql.create_record(databaseName, awsSecretStoreArn, dbClusterOrInstanceArn, record_info)
        return {
            "success": True,
            "message": "Saved Record",
            "record": record_info
        }

    if event_type == "updateRecord":
        record_info = request_body['recordInfo']
        sql.update_record(databaseName, awsSecretStoreArn, dbClusterOrInstanceArn, record_info)
        return {"success": True, "message": "Updated Record"}
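
# The `sql` helper module is not shown. A minimal sketch of what its
# create_table might look like against the RDS Data API (assumes boto3;
# the "records" schema below is hypothetical):
import boto3

rds_data = boto3.client('rds-data')

def create_table(database_name, secret_arn, cluster_arn):
    # The Data API runs SQL without managing a persistent DB connection.
    rds_data.execute_statement(
        resourceArn=cluster_arn,
        secretArn=secret_arn,
        database=database_name,
        sql="CREATE TABLE IF NOT EXISTS records "
            "(record_id INT PRIMARY KEY, payload VARCHAR(255))")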
async def on_ready():
    guild = discord.utils.get(bot.guilds)
    print(f'{bot.user.name} has connected to Discord!\n'
          f'{bot.user.name} is connected to the following guild:\n'
          f'{guild.name}(id: {guild.id})')

    # Set up alarm table
    if not sql.alarm_table_exists():
        print("No database found. Creating...")
        sql.create_table()

    # Get next alarm
    set_signal_next_alarm()
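
# sql.alarm_table_exists is not shown. A plausible sqlite3 sketch (the
# database path and table name here are assumptions):
import sqlite3

def alarm_table_exists(db_path='alarms.db'):
    # sqlite_master lists every table in the database file.
    conn = sqlite3.connect(db_path)
    try:
        cur = conn.execute(
            "SELECT name FROM sqlite_master WHERE type='table' AND name='alarm'")
        return cur.fetchone() is not None
    finally:
        conn.close()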
def create(self, table_name, field_types):
    db_cursor = self.db_conn.cursor()
    try:
        db_cursor.execute(sql.create_table(table_name, field_types))
        self.db_conn.commit()
    except sqlite3.OperationalError:
        pass  # Already exists?
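
# The bare `pass` above swallows every OperationalError, which can hide real
# failures (a locked database, a typo in the DDL). If sql.create_table
# renders the DDL string - an assumption, since that helper is not shown -
# emitting "IF NOT EXISTS" makes the try/except unnecessary:
def create_table(table_name, field_types):
    # field_types assumed to be a mapping of column name -> SQLite type
    columns = ", ".join(name + " " + ftype for name, ftype in field_types.items())
    return "CREATE TABLE IF NOT EXISTS " + table_name + " (" + columns + ")"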
def check_tables():
    c.pr("I", "Checking If Destination Tables Exist", 0)
    for scrip in scrips:
        # c.pr("I", "Checking For Table " + scrip, 1)
        qry = "SELECT * FROM information_schema.tables WHERE table_schema = 'stocki' AND table_name = '" + scrip + "' LIMIT 1;"
        if s.rcnt(qry):
            c.pr("I", scrip + " Table Exists", 1)
        else:
            c.pr("I", scrip + " Table Needs To Be Created", 1)
            s.create_table(scrip, "time:DT,timestamp:VC:15,open:FL,low:FL,high:FL,close:FL,volume:IN")
        qry = "SELECT * FROM information_schema.tables WHERE table_schema = 'stocki' AND table_name = '" + scrip + "_FUT' LIMIT 1;"
        if s.rcnt(qry):
            c.pr("I", scrip + "_FUT Table Exists", 1)
        else:
            c.pr("I", scrip + "_FUT Table Needs To Be Created", 1)
            s.create_table(scrip + "_FUT", "time:DT,timestamp:VC:15,open:FL,low:FL,high:FL,close:FL,volume:IN")
    return
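
# The existence checks above splice the scrip name straight into the SQL.
# The names come from an internal list, so this may be acceptable, but the
# check parameterizes cleanly if the underlying driver is exposed. A sketch,
# assuming a MySQL DB-API cursor (s.rcnt's internals are not shown):
qry = ("SELECT 1 FROM information_schema.tables "
       "WHERE table_schema = 'stocki' AND table_name = %s LIMIT 1")
# cursor.execute(qry, (scrip,))  # the driver binds the value; no hand quoting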
def __init__(self, *args, **kwargs):
    super(ShortestForwarding, self).__init__(*args, **kwargs)
    self.name = 'shortest_forwarding'
    self.awareness = kwargs["network_awareness"]
    self.monitor = kwargs["network_monitor"]
    self.delay_detector = kwargs["network_delay_detector"]
    self.datapaths = {}
    self.weight = self.WEIGHT_MODEL[CONF.weight]
    self.vip = {}
    self.conn = sql.get_conn(GPATH)
    self.flowconn = sql.get_conn(FPATH)
    self.busy = False
    self.doing_list = set()
    self.flow_infome = {}
    self.dotime = 0
    self.fltime = 0
    # Recreate the switch and flow tables from scratch on startup
    sql.drop_table(self.conn, 'switch')
    sql.create_table(self.conn, TABLESWITCH)
    sql.drop_table(self.conn, 'flow')
    sql.create_table(self.conn, TABLEFLOW)
    self.vip_thread = hub.spawn(self._vip)
def circular_get_data_table(self, db_name, tables):
    # How many leading tables to skip
    index = 1
    cur = 1
    for table in tables:
        if cur < index:
            cur += 1
            continue
        table_name_origin = table["label"]
        print("managing table: name:" + table_name_origin + " id : " + str(table["id"]))
        # Fetch both the detail data and the summary data
        urls = [self.get_maindata_url, self.get_countdata_url]
        table_name_lastfix = ["明细", "统计"]  # suffixes: "detail", "summary"
        for i in [0, 1]:  # renamed from `index`, which clobbered the skip counter above
            # 10 +/- 3 seconds
            sleep_time = 7 + random.random() * 6
            print("sleep for:" + str(sleep_time))
            sleep(sleep_time)
            url = urls[i]
            table_name = table_name_origin + table_name_lastfix[i]
            # Fetch the column headers
            status, columns = self.get_columns(self, table["id"], url)
            if not status:
                print("no columns?")
                continue  # return False
            # Create the data table
            if not sql.create_table(db_name, table_name, columns):
                print("circular_get_data_table create_table error: db_name: "
                      + db_name + " table_name:" + table_name)
                continue
            # Crawl the data and write it out
            # (this run only needs the headers)
            # status, data = self.circular_get_data(self, table["id"], url)
            # if status:
            #     if not self.save_data(self, db_name, table_name, data):
            #         return False
            # else:
            #     print("circular_get_data_table error: " + table_name)
            #     return False
    return True
def init():  # renamed from int(), which shadowed the built-in type
    create_table()
    return 'done'
# git=lhtangtao
# my_website=http://www.lhtangtao.com
# Description=homepage
"""
import datetime
import time

from main_page import get_page_num, get_village_href, get_location
from sql import create_table, update_info
from tools import get_administrative_location
from village import get_village_page_num, get_village_info

if __name__ == '__main__':
    id_num = 1
    id_num_source = 0
    create_table()  # Create the database table that the data will be written into
    # Check how many pages of new houses http://www.tmsf.com/newhouse/property_searchall.htm lists in total
    new_house_page = int(get_page_num())
    print u"There are " + str(new_house_page) + u" pages of new-house communities in total"
    # Vary the page= parameter:
    # http://www.tmsf.com/newhouse/property_searchall.htm?searchkeyword=&keyword=&sid=&districtid=&areaid=&dealprice=&propertystate=&propertytype=&ordertype=&priceorder=&openorder=&view720data=&page=3&bbs=&avanumorder=&comnumorder=
    # then collect the URL of every community on each page
    url1 = "http://www.tmsf.com/newhouse/property_searchall.htm?searchkeyword=&keyword=&sid=&districtid=&areaid" \
           "=&dealprice=&propertystate=&propertytype=&ordertype=&priceorder=&openorder=&view720data=&page="
    url2 = "&bbs=&avanumorder=&comnumorder="
    # Example URL: http://www.tmsf.com/newhouse/property_33_514158189_price.htm
    now_time_start = datetime.datetime.now()  # current time
    for i in range(1, new_house_page + 1):
        now_time_start_page = datetime.datetime.now()  # current time
        url = url1 + str(i) + url2
        every_page_village_url_list = get_village_href(
for y, tpe in enumerate(df['TYPE']):
    types.append(str(tpe))
print(len(types))

table_set = set(tables)
for i in range(len(tables)):
    # If the field name starts with the last 3 letters of the table name,
    # add it to the parameters
    # if fields[i].startswith(str(tables[i])[-3:]):
    if descriptions[i] == 'Clé étrangère':  # foreign key
        line = (str(tables[i]), str(fields[i]), str(sizes[i]), str(types[i]), 2, str(references[i]))
    elif descriptions[i] == 'Clé primaire':  # primary key
        line = (str(tables[i]), str(fields[i]), str(sizes[i]), str(types[i]), 1, str(references[i]))
    else:
        line = (str(tables[i]), str(fields[i]), str(sizes[i]), str(types[i]), 0, str(references[i]))
    parameters.append(line)
    # print("field data =>", str(tables[i]) + '/' + str(fields[i]))

print(parameters)
print(len(table_set))
for t in table_set:
    create_table(t)
for p in parameters:
    alter_table(p)
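
# alter_table is not shown. Each tuple is (table, field, size, type,
# key_flag, reference), with key_flag 1 = primary key, 2 = foreign key,
# 0 = plain column. A sketch of one way it could render the DDL
# (dialect details such as constraint syntax vary):
def alter_table(p):
    table, field, size, ftype, key_flag, reference = p
    ddl = "ALTER TABLE {} ADD COLUMN {} {}({})".format(table, field, ftype, size)
    if key_flag == 2:
        ddl += " REFERENCES " + reference
    print(ddl)  # or hand it to the same cursor that create_table uses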
def run(make_prediction=True):
    print make_prediction
    print("v4")  # Print current iteration/version for sanity

    session = requests.Session()  # Construct a NextBus API compliant requester
    session.headers.update({"User-Agent": "NextBuzz ([email protected])"})

    if make_prediction:
        model_path = os.path.join(BASE_DIR, "model.pkl")
        model = joblib.load(model_path)  # Load in the regression model

    sql.create_table()  # Create database infra
    gt = GeorgiaTech()  # Instantiate context object

    while True:  # Big loop for scraping bus data.
        try:
            time.sleep(5)  # Pause between requests

            # Collect weather data
            weather = session.get(
                "https://api.openweathermap.org/data/2.5/weather?q=atlanta&APPID=00c4c655fa601a48dc5bf4f34c4ce86a"
            )
            if weather.status_code != 200:
                # Restart loop if we can't get weather data.
                continue
            weather_json = weather.json()

            # Collect and parse NextBus data
            for route in gt.all_routes:
                time.sleep(2)  # Pause between queries
                r = session.get(
                    "https://gtbuses.herokuapp.com/agencies/georgia-tech/routes/"
                    + route + "/predictions")
                r2 = session.get(
                    "https://gtbuses.herokuapp.com/agencies/georgia-tech/routes/"
                    + route + "/vehicles")
                if r.status_code != 200 or r2.status_code != 200:
                    continue

                stops = xmltodict.parse(r.text)["body"]["predictions"]
                # All stops for this route
                for stop in stops:
                    stop_name = stop["@stopTag"]
                    route_name = stop["@routeTag"]

                    # First determine if there are any predictions
                    if "direction" not in stop:
                        log.log("No predictions for stop " + stop_name
                                + " for route " + route_name)
                        continue
                    stop_predictions = stop["direction"]["prediction"]
                    if type(stop_predictions) == list:
                        prediction = stop_predictions[0]
                    else:
                        prediction = stop_predictions

                    # Next extract prediction data
                    layover = "@affectedByLayover" in prediction
                    is_departure = prediction["@isDeparture"] == "true"
                    arrival_epoch = int(int(prediction["@epochTime"]) / 1000)
                    seconds_arrival = int(prediction["@seconds"])
                    current_epoch = arrival_epoch - seconds_arrival
                    bus_number = prediction["@vehicle"]

                    # Next extract vehicle data
                    vehicles = xmltodict.parse(r2.text)["body"]["vehicle"]
                    if type(vehicles) != list:
                        vehicles = [vehicles]
                    numbuses = len(vehicles)
                    kmperhr = -1
                    buslat = -1
                    buslong = -1
                    for v in vehicles:
                        if bus_number == v["@id"]:
                            kmperhr = v["@speedKmHr"]
                            buslat = v["@lat"]
                            buslong = v["@lon"]

                    # Next is weather data
                    weather_name = None
                    if type(weather_json["weather"]) == list and len(weather_json["weather"]) > 0:
                        weather_name = weather_json["weather"][0]["main"]

                    # Build the row
                    row = []
                    row.append(current_epoch)      # Timestamp
                    row.append(stop_name)          # Stop being approached
                    row.append(route_name)         # Red, blue...
                    row.append(kmperhr)            # Speed of bus
                    row.append(bus_number)         # Bus ID
                    row.append(numbuses)           # Number of buses
                    row.append(buslat)             # Latitude of bus
                    row.append(buslong)            # Longitude of bus
                    row.append(str(layover))       # Is this bus' prediction inaccurate?
                    row.append(str(is_departure))  # Is the bus waiting?
                    row.append(arrival_epoch)      # Predicted timestamp of arrival
                    row.append(seconds_arrival)    # Seconds to arrival prediction
                    row.append(weather_json["main"]["temp"])      # Temp in kelvin
                    row.append(weather_json["main"]["pressure"])  # Air pressure
                    row.append(weather_json["main"]["humidity"])  # Air humidity
                    row.append(weather_json["visibility"])        # Air visibility
                    row.append(weather_name)       # cloudy, rainy, sunny...
                    row.append(weather_json["wind"]["speed"])     # Wind speed
                    row.append(weather_json["clouds"]["all"])     # Cloud coverage

                    if make_prediction:
                        # Use these features to predict actualSecondsToArrival
                        my_prediction = predict.predict(model, row)[0]
                        row.append(my_prediction)
                        print(str(my_prediction) + " from " + str(seconds_arrival))

                    output = "("
                    for item in row:
                        if isinstance(item, basestring):
                            output += "\'" + str(item) + "\',"
                        else:
                            output += str(item) + ","
                    output = output[0:-1]
                    output += ")"
                    query = "INSERT INTO NEXTBUS VALUES " + output
                    print(query)
                    sql.query_write(query)
                    log.log("Inserted for " + route_name + " at " + stop_name)
        except Exception as e:
            log.log("Exception:")
            log.log(traceback.format_exc())
                ], cache_time=0
            )
        else:
            update.inline_query.answer(
                [], switch_pm_text="Not sure what you're listening to.",
                switch_pm_parameter='notsure', cache_time=0)
    except Exception as e:
        print(e)
        update.inline_query.answer(
            [], switch_pm_text="You're not listening to anything.",
            switch_pm_parameter='notlistening', cache_time=0)

helptext = '''
Tap /now to share what you're listening to on Spotify. You can also use the inline mode by typing @SpotifyNowBot in any chat.\n
If you're new, you need to /link your account to get started. You can always /unlink it whenever you feel like.\n
If you're facing errors, try restarting Spotify. No good? Send /cancel followed by /relink and if the issue persists, report it to @notdedsec.\n'''

if __name__ == "__main__":
    if not os.path.exists('spotifynow.db'):
        sql.create_table()

    with open('config.json', 'r') as conf:
        config = json.load(conf)
    dumpchannel, jkey, client_id, client_secret, redirect_uri, bot_token, sudoList = config.values()

    authlink = (f"https://accounts.spotify.com/authorize?client_id={client_id}"
                f"&response_type=code&redirect_uri={linkparse(redirect_uri)}"
                f"&scope=user-read-currently-playing")

    updater = Updater(bot_token, use_context=True)
    os.system("title " + Bot(bot_token).first_name)
    logging.basicConfig(format='\n\n%(levelname)s\n%(asctime)s\n%(name)s\n%(message)s', level=logging.ERROR)

    USERNAME, AUTHTOKEN = range(2)
    link_handler = ConversationHandler(
        entry_points=[CommandHandler('link', link)],
        states={USERNAME: [MessageHandler(Filters.text, getusername)]},
        fallbacks=[CommandHandler('cancel', cancel)])
url = await q.get()
data = await get_json(client, url)
id_ = data['id']
name = data['name']
sprite = await get_sprite_response(client, data['sprites']['default'])
effect = data['effect_entries'][0]['effect'].replace('\n', '')
flavor_text = data['flavor_text_entries'][0]['text'].replace('\n', ' ')
category = data['category']['name']
item = Item(id_, name, flavor_text, sprite, effect, category)
items.append(item)
q.task_done()


async def retrieve_items(client, queue, producer, consumer, urls):
    producers = asyncio.create_task(producer(queue, urls))
    consumers = [asyncio.create_task(consumer(client)) for x in range(0, len(urls))]
    await asyncio.gather(producers)
    await queue.join()  # was q.join(), which only worked via the module-level global
    for consumer in consumers:
        consumer.cancel()


async def main():
    async with aiohttp.ClientSession() as client:
        await retrieve_items(client, q, task_creator, getter, urls)


response = requests.get(url).json()
urls = [x['url'] for x in response['results']]
loop.run_until_complete(main())
create_table(CREATE_ITEM_TABLE)
insert_items_query(items)
pokemon_data = Pokemon(data['name'], description, colour, pokedex, sprite, _type)
pokemon.append(pokemon_data)
print(len(pokemon))
q.task_done()
print('task finished')


async def retrieve_poke(client, q, producer, consumer, urls):
    producers = asyncio.create_task(producer(q, urls))
    consumers = [
        asyncio.create_task(consumer(client)) for x in range(0, len(urls))
    ]
    await asyncio.gather(producers)
    await q.join()
    for consumer in consumers:
        consumer.cancel()


async def main():
    async with aiohttp.ClientSession(loop=loop) as client:
        await retrieve_poke(client, q, task_creator, getter, urls)


data = requests.get(url).json()
urls = [x['url'] for x in data['results']]
loop.run_until_complete(main())
create_database(CREATE_POKEDEX_DATABASE)
create_table(CREATE_POKEMON_TABLE)
insert_pokemon_query(pokemon)
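
# Neither snippet shows task_creator or getter. The call sites imply the
# standard asyncio producer/consumer pair; a minimal sketch of the producer
# side (the name and signature are taken from the calls above):
async def task_creator(queue, urls):
    # Producer: enqueue every URL; queue.join() in the caller waits until
    # the consumers have called task_done() once per item.
    for url in urls:
        await queue.put(url)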
else:
    i = i + 1
l.append(i)
# print(read_data[l[0]+1:l[1]])

header = []
for z in range(9):
    # 0 = dates, 1 = Cat, 2 = subCat, 3 = Loc, 4 = Des, 5 = Amount, 6 = Status, 7 = Remark
    if z != 6:
        header.append(read_data[l[z] + 1:l[z + 1]])
# print(header[6])

conn = sql.create_connection(db_dir)
with conn:
    if initial:
        if conn is not None:
            # initialize the lookup and expense tables
            sql.create_table(conn, sql_create_category_table)
            sql.create_table(conn, sql_create_location_table)
            sql.create_table(conn, sql_create_description_table)
            sql.create_table(conn, sql_create_status_table)
            sql.create_table(conn, sql_create_expense_table)
        else:
            print("Error loading database")

    ''' Category Start '''
    index1 = sql.query_CategoryID(conn, header[1])
    if index1 is None:
        task = ("null", header[1])
        idd = sql.create_category(conn, task)
        # print(idd)
    '''
        date TEXT NOT NULL,
        source TEXT,
        explicit INTEGER
    );
"""

sql_create_tag_table = """
    CREATE TABLE IF NOT EXISTS tag (
        id INTEGER PRIMARY KEY,
        name TEXT NOT NULL
    );
"""

# create a database connection
conn = sql.create_connection(database)
if conn is not None:
    # create image table
    sql.create_table(conn, sql_create_image_table)
    # create tag table
    sql.create_table(conn, sql_create_tag_table)
else:
    print("Error! Cannot create the database connection.")


def text_based():
    with conn:
        # user input
        com = input("Enter command: ")
        while com:
            if com == "createimage":
                name = input("Enter name: ")
                date = input("Enter date: ")
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

from sql import create_table, retrieve_data

create_table('Test.db', 'Test')
data = retrieve_data('Test.db', 'Test', 'Title', 'Hello')
print(data)
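
# retrieve_data's implementation is not shown. A plausible sqlite3 sketch
# that matches the call above (the table/column handling is an assumption):
import sqlite3

def retrieve_data(db_file, table, column, value):
    conn = sqlite3.connect(db_file)
    try:
        # Table and column names cannot be bound as parameters, so they are
        # interpolated here; only the value goes through placeholder binding.
        cur = conn.execute(
            "SELECT * FROM {} WHERE {} = ?".format(table, column), (value,))
        return cur.fetchall()
    finally:
        conn.close()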
def main():
    vv = False
    if '--help' in argv or '-h' in argv:
        print("""
=== VK mobile phones dumper MySQL ==

-h --help : show this help and exit
-v --view : show logs while the program is running
""")
        print('Usage: ' + argv[0] + ' [group_id] [params]')
        print()
        print('Examples:')
        print()
        print(argv[0] + ' 123 -v')
        print(argv[0] + ' 456')
        exit(0)

    if len(argv) < 2:
        logs.echoInfo('Usage: [group_id] [params]')
        logs.echoInfo('Example: ' + argv[0] + ' 1234567 -v')
        exit(0)

    if '-v' in argv or '--view' in argv or '-vv' in argv:
        vv = True

    try:
        group_id = int(argv[1])
        if group_id < 1:
            logs.echoWarning('Group id must be positive')
            exit(0)
    except ValueError:
        logs.echoWarning('Invalid group_id')
        logs.echoInfo('Group id must be an integer')
        exit(0)  # group_id is undefined past this point, so bail out

    if not settings.user['access_token']:
        logs.echoWarning('Missing access_token. Put it in settings.py file')
        exit(0)

    if vv:
        logs.echoInfo('Checking access_token...')
    if not f.check_valid():
        logs.echoMinus('access_token is invalid! Put correct access_token in settings.py')
        exit(0)
    if vv:
        logs.echoPlus('access_token is valid')

    sql.create_table()
    if vv:
        logs.echoPlus('Created table ' + settings.db['table_name'])
        logs.echoInfo('Starting...')

    start_time = time.time()
    members_count = f.groupsGetMembers(group_id, 0)['count']
    offset = 0
    analyzed = 0
    wrote = 0
    logs.echoPlus('Started')

    try:
        while offset < members_count:
            user_ids = f.groupsGetMembers(group_id, offset)['items']
            us_offset_1 = -200
            us_offset_2 = 0
            # Walk the member ids in chunks of 200 (the users.get limit)
            while us_offset_2 != len(user_ids):
                us_offset_1 += 200
                if (len(user_ids) - 200) > us_offset_2:
                    us_offset_2 += 200
                else:
                    us_offset_2 = len(user_ids)
                response = f.usersGetInfo(user_ids[us_offset_1:us_offset_2])
                for i in range(len(response)):
                    try:
                        flag = True
                        info = response[i]
                        link = 'https://vk.com/' + info['domain']
                        first_name = info['first_name']
                        last_name = info['last_name']
                        mobile_phone = info['mobile_phone'].replace('(', '').replace(')', '').replace(' ', '').replace('-', '')
                        try:
                            city = info['city']['title']
                        except KeyError:
                            city = 'Не указан'  # "Not specified"
                        try:
                            country = info['country']['title']
                        except KeyError:
                            country = 'Не указана'  # "Not specified"
                        try:
                            bdate = info['bdate']
                        except KeyError:
                            bdate = 'Не указана'  # "Not specified"
                        try:
                            sex = info['sex']
                            if sex == 1:
                                sex = 'Женский'  # "Female"
                            elif sex == 2:
                                sex = 'Мужской'  # "Male"
                            else:
                                sex = 'Другое'  # "Other"
                        except KeyError:
                            sex = 'Не указан'  # "Not specified"
                        try:
                            for symbol in settings.filtr['phone_blacklisted_symbols']:
                                if not flag:
                                    break
                                if (symbol in mobile_phone.lower()
                                        or len(mobile_phone) < 8
                                        or mobile_phone in settings.filtr['phone_blacklisted_phones']):
                                    flag = False
                        except Exception:
                            pass
                        if sex.lower() in settings.filtr['sex_blacklisted_sex']:
                            flag = False
                        if city.lower() in settings.filtr['city_blacklisted_city']:
                            flag = False
                        if country.lower() in settings.filtr['country_blacklisted_country']:
                            flag = False
                        if bdate in settings.filtr['bdate_blacklisted_bdate']:
                            flag = False
                        if flag:
                            sql.add_line(first_name=first_name,
                                         last_name=last_name,
                                         sex=sex,
                                         bdate=bdate,
                                         mobile_phone=mobile_phone,
                                         city=city,
                                         country=country,
                                         link=link)
                            wrote += 1
                    except Exception:
                        pass
                analyzed += len(response)
                if vv:
                    logs.echoInfo('Analyzed users: (' + str(analyzed) + '/' + str(members_count) + ')')
            if vv:
                logs.echoInfo('Wrote info about ' + str(wrote) + ' users')
            offset += 1000
        print()
        logs.echoPlus('Finished')
        f.stats(start_time=start_time,
                members_count=members_count,
                analyzed=analyzed,
                wrote=wrote,
                did_not_indicate=members_count - wrote)
    except KeyboardInterrupt:
        print()
        logs.echoInfo('Interrupted')
        f.stats(start_time=start_time,
                members_count=members_count,
                analyzed=analyzed,
                wrote=wrote,
                did_not_indicate=members_count - wrote)