def main():
    """Poll the combined subreddit comment/submission streams forever.

    Even though the body already loops with ``while True``, the streams are
    created once up front so their ``pause_after=0`` setting is kept and no
    historical items are replayed on each pass.
    """
    combined = reddit.subreddit('+'.join(subreddits))
    comments = combined.stream.comments(pause_after=0)
    submissions = combined.stream.submissions(pause_after=0)

    loop_count = 0
    while True:
        # Heartbeat every 500 passes so long-running logs show liveness.
        if loop_count % 500 == 0:
            print("Iteration: {}".format(loop_count))
        loop_count += 1

        # Fresh DB connection per pass; closed again at the bottom.
        conn, cursor = db.connect_to_db()

        # pause_after=0 makes the streams yield None once drained.
        comment = next(comments)
        while comment is not None:
            check_comment(comment, conn, cursor)
            comment = next(comments)

        submission = next(submissions)
        while submission is not None:
            check_submission(submission, conn, cursor)
            submission = next(submissions)

        # for comment in reddit.inbox.unread(mark_read=True, limit=None):
        #     if isinstance(comment, Comment):
        #         check_comment(comment, conn, cursor)

        conn.close()
def _send_direct_message(text, marker):
    """Open the message dialog on the current profile, type *text*, send it.

    Args:
        text: message body to type into the textarea.
        marker: debug value printed just before clicking send (matches the
            original per-branch debug prints, 3333 / 1111).

    Assumes the driver is already on a user profile page. The XPaths target
    the react-root DOM layout — NOTE(review): confirm they still match, this
    markup changes frequently.
    """
    msg_btn = driver.find_element_by_xpath('//*[@id="react-root"]/section/main/div/header/section/div[2]/div[1]/button')
    msg_btn.click()
    sleep(3)
    text_box = driver.find_element_by_xpath('//*[@id="react-root"]/section/div[2]/div[2]/div/div/div/textarea')
    text_box.send_keys(text)
    sleep(1)
    print(marker)
    send_button = driver.find_element_by_xpath('//*[@id="react-root"]/section/div[2]/div[2]/div/div/div[2]/button')
    send_button.click()


def main():
    """Visit the first profile link, then DM it; follow first if it is new.

    If ``connect_to_db(user.text)`` is truthy the user is already known and
    only a message is sent; otherwise the follow button is clicked first.
    (The two branches previously duplicated the whole send sequence; it now
    lives in ``_send_direct_message``.)
    """
    ref = driver.find_element_by_xpath('//*[@id="react-root"]/section/main/div/div[1]/div/div[1]/div[2]/div/a')
    ref.click()
    sleep(1)
    user = driver.find_element_by_xpath('//*[@id="react-root"]/section/main/div/header/section/div[1]/h1')
    if connect_to_db(user.text):
        print(4444)
        sleep(3)
        _send_direct_message(MESSAGE, 3333)
    else:
        print(2222)
        # Unknown user: click the follow/subscribe button before messaging.
        sub = driver.find_element_by_xpath('//*[@id="react-root"]/section/main/div/header/section/div[2]/div/span/span[1]/button')
        sub.click()
        sleep(3)
        _send_direct_message(MESSAGE, 1111)
def get_all_stands():
    """Return (osm_id, name, GeoJSON geometry) for every WhiteBikes stand.

    Geometry is reprojected to WGS84 (SRID 4326) and serialized as GeoJSON.
    """
    cursor = connect_to_db()
    query = (
        "SELECT osm_id, name, ST_AsGeoJSON(st_transform(way, 4326)) "
        "FROM planet_osm_point "
        "WHERE amenity = 'bicycle_rental' AND operator = 'WhiteBikes'"
    )
    cursor.execute(query)
    return cursor.fetchall()
def main():
    """Accept TCP connections on port 8080, one handler thread per client."""
    # Turn on the server socket, bound to all interfaces.
    main_sock = socket.socket()
    main_sock.bind(('0.0.0.0', 8080))  # port
    main_sock.listen(5)

    # Connect to mongodb up front — verifies the DB is reachable before serving.
    connect_to_db()

    print('Waiting for connection...')
    while True:
        client_sock, client_addr = main_sock.accept()
        # 300 seconds of timeout so idle clients are dropped.
        client_sock.settimeout(300)
        # New thread to handle this client's requests.
        worker = threading.Thread(target=handler, args=(client_sock, client_addr))
        worker.start()
def get_nearest_bike_paths(stand_id):
    """Return GeoJSON geometries of bike paths near the given stand.

    Matches lines tagged bicycle='designated' or highway='cycleway' within
    1000 units of the stand's point.

    Args:
        stand_id: osm_id of a stand row in planet_osm_point.

    Returns:
        List of single-column rows, each a GeoJSON geometry string.
    """
    cursor = connect_to_db()
    # Parameterized query — the original interpolated stand_id with
    # str.format, which is an SQL-injection vector.
    cursor.execute(
        "SELECT st_asgeojson(st_transform(l.way, 4326)) "
        "FROM planet_osm_point p, planet_osm_line l "
        "WHERE p.osm_id = %(stand_id)s "
        "AND (l.bicycle = 'designated' OR l.highway = 'cycleway') "
        "AND ST_DWithin(st_setsrid(p.way, 4326), st_setsrid(l.way, 4326), 1000); ",
        {"stand_id": stand_id})
    paths = cursor.fetchall()
    return paths
def get_city_district_by_name(district_name):
    """Return the GeoJSON geometry of the polygon matching *district_name*.

    The name comparison is case-insensitive; at most one row is returned.

    Args:
        district_name: district name to look up (user-supplied).

    Returns:
        List with at most one single-column row (GeoJSON string).
    """
    cursor = connect_to_db()
    # Parameterized query — district_name was previously interpolated with
    # str.format into a quoted literal, allowing SQL injection.
    cursor.execute(
        "SELECT st_asgeojson(st_transform(way, 4326)) AS way "
        "FROM planet_osm_polygon WHERE upper(name) = upper(%(district_name)s) LIMIT 1;",
        {"district_name": district_name})
    district = cursor.fetchall()
    return district
def get_stands_by_city_district(district_name):
    """Return (osm_id, name) of WhiteBikes stands inside the named district.

    The district-name comparison is case-insensitive.

    Args:
        district_name: district name to match (user-supplied).

    Returns:
        List of (osm_id, name) tuples, deduplicated.
    """
    cursor = connect_to_db()
    # Parameterized query — district_name was previously spliced in with
    # str.format, allowing SQL injection.
    cursor.execute(
        "SELECT DISTINCT point.osm_id, point.name "
        "FROM planet_osm_polygon pol, planet_osm_point point "
        "WHERE upper(pol.name) = upper(%(district_name)s) "
        "AND point.operator = 'WhiteBikes' AND point.amenity = 'bicycle_rental' "
        "AND st_intersects(pol.way, point.way)",
        {"district_name": district_name})
    stands = cursor.fetchall()
    return stands
def run():
    """Simple REPL: 'start' samples sensor data and persists it; 'quit' exits.

    Fixes two defects in the original: a dead ``False`` statement after
    ``exit()``, and a NameError when the first command was neither 'start'
    nor 'quit' (``data``/``engine`` were used while unbound).
    """
    while True:
        command = input()
        if command == 'quit':
            exit()
        if command != 'start':
            # Unknown command: ignore it rather than re-writing stale data.
            continue
        data = _generate_sensor_data()
        engine = db.connect_to_db(config.DB_CONNECTION)
        # NOTE(review): 'crete_tables' is the helper's actual name in db
        # (presumably a typo for create_tables) — rename at its definition.
        db.crete_tables(engine)
        db.write_record_to_db(data, engine)
def pull_to_db():
    """Scrape the configured data URL and insert one OWCity record per station.

    Connection parameters and the scrape URL come from the .env file.
    The DB connection is now closed in a ``finally`` block so it is released
    even when scraping or an insert raises (the original leaked it).
    """
    config = dotenv_values(".env")
    db_connection = db.connect_to_db(config["db_host"], config["db_port"],
                                     config["db_user"], config["db_pass"],
                                     config["db_database"])
    try:
        dictionary = scrapper.get_dictionary(config["data_url"])
        for station_id, data in dictionary.items():
            db.insert_to_db(db_connection, OWCity.OWCity(station_id, data))
    finally:
        db_connection.close()
def get_nearest_stands(lat, lng, not_empty):
    """Return WhiteBikes stands within 2000 m of (lat, lng), nearest first.

    Args:
        lat, lng: WGS84 coordinates (user-supplied).
        not_empty: the literal string 'true' restricts results to stands
            that currently have at least one bike — NOTE(review): the flag
            arrives as a string, presumably from an HTTP query param.

    Returns:
        List of (osm_id, name, distance) rows ordered by distance.
    """
    cursor = connect_to_db()
    # Parameterized query — lat/lng were previously interpolated with
    # str.format, an SQL-injection vector for user-supplied coordinates.
    cursor.execute(
        "SELECT osm_id, name, trunc(ST_Distance(way, st_transform("
        " st_setsrid(st_makepoint(%(lng)s, %(lat)s), 4326), 3857))) AS distance "
        "FROM planet_osm_point "
        "WHERE ST_DWithin(way, st_transform( st_setsrid(st_makepoint(%(lng)s, %(lat)s), 4326), 3857), 2000) "
        "AND amenity = 'bicycle_rental' and operator like 'WhiteBikes' "
        "ORDER BY distance;",
        {"lat": lat, "lng": lng})
    stands = cursor.fetchall()
    if not_empty == 'true':
        # Keep only stands whose live bike count is positive.
        stands = [stand for stand in stands
                  if get_current_bike_count(stand[1]) > 0]
    return stands
def process_data(path, func):
    """Apply ``func(conn, file)`` to every file under *path*, recursively.

    Args:
        path: root directory to walk.
        func: callable taking (db_connection, file_path) and processing one file.

    The DB connection is now released in a ``finally`` block so it is closed
    even when ``func`` raises mid-run (the original leaked it on error).
    """
    logger.info(f"Start processing '{path}' data")
    # os.walk yields empty `filenames` lists harmlessly, so the original
    # `if filenames` guard was redundant and has been dropped.
    files = [
        os.path.join(dirpath, filename)
        for dirpath, _dirnames, filenames in os.walk(path)
        for filename in filenames
    ]
    file_amount = len(files)
    logger.info(f"'{file_amount}' files found in '{path}'")
    conn = connect_to_db()
    try:
        for i, file in enumerate(files, 1):
            func(conn, file)
            logger.info(f'{i}/{file_amount} files processed.')
    finally:
        close_connection(conn)
    logger.info(f"Finish processing '{path}' data")
def main():
    """Scrape pokemon data with a web driver and load it into the configured DB."""
    print("[+] Starting pokemon scraper bot")

    # Load runtime configuration (db/collection names, client id).
    print("[+] Loading ./config.json file")
    with open("config.json") as json_data_file:
        config = json.load(json_data_file)
    print("[+] Success config loaded")

    # Gather the raw listing first, then per-pokemon details.
    browser = bot.web.gen_driver()
    base_data = bot.pokemon.gather_base_pokemon(browser)
    data = bot.pokemon.gather_specific_pokemon_data(browser, base_data, config["client_id"])

    print("[+] Connecting to db " + config["db_name"])
    connection = db.connect_to_db(config["db_name"])
    print("[+] Success connected to db")

    print("[+] Adding items to " + config["collection_name"])
    collection = config["collection_name"]
    for item in data:
        db.insert_item(connection, collection, item)
    print("[+] Success all items have been added")
    print("[+] Scraper bot complete")
def get_nearest_stands_by_street(street_name, not_empty=False):
    """Return WhiteBikes stands within 1000 units of the named street.

    Args:
        street_name: street name, matched case-insensitively (user-supplied).
        not_empty: the literal string 'true' restricts results to stands
            with at least one bike. NOTE(review): the declared default is
            the boolean False, which never equals 'true', so the default is
            effectively "no filtering" — confirm callers pass the string.

    Returns:
        List of (osm_id, name, distance) rows ordered by distance.
    """
    cursor = connect_to_db()
    # Parameterized query — street_name was previously interpolated with
    # str.format into a quoted literal, allowing SQL injection.
    cursor.execute(
        "SELECT osm_id, name, distance FROM ("
        "WITH RECURSIVE streets AS ("
        "SELECT osm_id, name, way FROM planet_osm_line "
        "WHERE upper(name) = upper(%(street_name)s)) "
        "SELECT DISTINCT ON (p.osm_id) p.osm_id, p.name, p.way, "
        "trunc(st_distance(st_setsrid(p.way, 4326), st_setsrid(l.way, 4326))) AS distance "
        "FROM planet_osm_point p, streets l "
        "where p.amenity = 'bicycle_rental' AND p.operator = 'WhiteBikes' "
        "AND ST_DWithin(st_setsrid(p.way, 4326), st_setsrid(l.way, 4326), 1000)) unordered_stands "
        "ORDER BY distance;",
        {"street_name": street_name})
    stands = cursor.fetchall()
    if not_empty == 'true':
        # Keep only stands whose live bike count is positive.
        stands = [stand for stand in stands
                  if get_current_bike_count(stand[1]) > 0]
    return stands
import os, sys sys.path.append( os.path.dirname(os.path.dirname(os.path.dirname( os.path.abspath(__file__))))) from db import connect_to_db # Create database connection engine = connect_to_db() # Creating asset table asset_query = """CREATE TABLE asset ( id BIGSERIAL PRIMARY KEY, symbol VARCHAR (15) NOT NULL UNIQUE, name VARCHAR (100) NULL, type VARCHAR (10) NULL, yf_symbol VARCHAR (15) NULL );""" engine.execute(asset_query) # Creating portfolio table portfolio_query = """CREATE TABLE portfolio ( id BIGSERIAL PRIMARY KEY, name VARCHAR (100) NOT NULL UNIQUE );""" engine.execute(portfolio_query) # Creating asset_portfolio table asset_portfolio_query = """CREATE TABLE asset_portfolio ( id BIGSERIAL PRIMARY KEY, asset_id INTEGER NOT NULL REFERENCES asset(id),
date = datetime.date.today() year = date.year month = date.month day = date.day file_name = "{0}-doi-{1}-{2}-{3}.csv".format(journal.get('path'), year, month, day) with open(os.path.join(settings.BASE_DIR, 'csv', file_name), 'w') as fp: a = csv.writer(fp, delimiter=',') for row in records: a.writerow(row) # connect to the database cursor, con = db.connect_to_db() # grab the journals and loop through them generating a csv for each journals = db.multi_getter_where(cursor, table='journals', where='enabled=1') for journal in journals: print "Generating CSV for {0}".format(journal.get('path')) # grab this journal's published articles published_articles = db.get_published_articles(cursor, journal.get('journal_id')) # get csv record rows for each author records = get_csv_data(cursor, published_articles, journal.get('path')) print "{0} records found".format(len(records)) # export a csv for these records
import sys

from myapp.create_app import create_app
from db import connect_to_db

# Build the Flask app via the application factory and attach the DB handle.
app = create_app()
MONGO = connect_to_db(app)

# print(sys.path)

if __name__ == '__main__':
    # Listen on all interfaces; debug mode enables the auto-reloader.
    app.run(host='0.0.0.0', debug=True)
def init_db_and_user(self, *args, **kwargs):
    """Connect to the database and bind the followers table on ``self``.

    ``**kwargs`` is forwarded verbatim to ``db.connect_to_db``; ``*args`` is
    accepted for call-site compatibility but unused. Creates the table on
    first use if it does not exist yet.
    """
    connection = db.connect_to_db(**kwargs)
    followers = db.Base(connection, settings.table_name)
    self.Followers = followers
    if not followers.check_on_table_created():
        followers.create_table()
def db_connecter(**kwargs):
    """Return a ``db.Base`` handle for the configured table.

    ``**kwargs`` is forwarded verbatim to ``db.connect_to_db``. The table is
    created on first use if it does not exist yet.
    """
    table = db.Base(db.connect_to_db(**kwargs), settings.table_name)
    if not table.check_on_table_created():
        table.create_table()
    return table
def create_sql_session():
    """Create a session factory bound to the default engine and return a new session."""
    factory = sessionmaker(connect_to_db())
    return factory()
import os
import json
from random import choice, randint
from db import (
    db,
    connect_to_db,
    loans as loans_db
)
import server

# Recreate the database from scratch — this DROPS all existing data.
os.system('dropdb loans')
os.system('createdb loans')

# Bind the freshly created database to the Flask app.
connect_to_db(server.app)

# Seed fixture: a list of categories, each carrying a list of loans.
with open('seed_data/loan.json') as f:
    loan_data = json.loads(f.read())

for category_json in loan_data:
    # Create the category row first so loans can reference its id.
    category = loans_db.create_category_loan(category_json["category_name"])
    loan_data_list = category_json["loans"]
    for loan_json in loan_data_list:
        loan = loans_db.create_loan(loan_json["loan_name"],
                                    loan_json["loan_description"],
                                    loan_json["loan_website"],
                                    loan_json["loan_gov"],
                                    loan_json["loan_region"],
                                    loan_json["loan_city"],
                                    loan_json["loan_credit_union"],
                                    category.category_loans_id,
                                    loan_json["loan_photo"])
stream_index: { 'sumDelta': 0, 'averageDelta': 0, 'ip': ip, 'domain': string, 'numberOfPackets': 1, 'totalMbSize': size in MB, 'startTime': timestamp, 'endTime': timestamp, 'protocol': string } } """ packet_dict = {} # Connect to db if we are in the mongo export mode if (args.export == "mongo"): environment.check_mongo_env() db.connect_to_db() # Open the capture file cap = pyshark.FileCapture(captureFileName) # Launch capture file analysis cap.apply_on_packets(analyse_packets) # We push_data all the remaining streams in packet_dict for key in packet_dict: push_data(key) print('Analyse done')
def get_sql_session():
    """
    Creates and returns a SQLAlchemy session() object
    """
    # Build the factory bound to the default engine, then instantiate once.
    return sessionmaker(connect_to_db())()
@app.route("/save_loan.json", methods=['POST'])
def save_loan_json():
    # Thin wrapper: persistence is delegated to the loans API module.
    return loans_api.save_loan_json()


@app.route("/delete_loan.json", methods=['POST'])
def delete_loan_json():
    return loans_api.delete_loan_json()


@app.route("/compare_loans.json", methods=['POST'])
def compare_loans():
    return loans_api.compare_loans()


@app.route("/create_user", methods=['POST'])
def create_user():
    # User endpoints are delegated to the user API module.
    return user_api.create_user()


@app.route("/user_profile.json", methods=['GET'])
def user_profile():
    return user_api.user_profile()


if __name__ == '__main__':
    # Bind the application's database before serving requests.
    connect_to_db(app)
    app.run(host='0.0.0.0')
'accept': "application/vnd.vtex.ds.v10+json", 'x-vtex-api-appkey': api_key, 'x-vtex-api-apptoken': api_token } # url used for api # f_authorizedDate=authorizedDate:[2020-05-15T02:00:00.000Z TO 2020-05-20T01:59:59.999Z]& url_stocks_from_date = "https://vetro.vtexcommercestable.com.br/api/oms/pvt/orders?f_authorizedDate=authorizedDate:[{} TO {}]&orderBy=creationDate,asc&page=".format( forty_mins_before, now) url_get_stock = "https://vetro.vtexcommercestable.com.br/api/oms/pvt/orders/" # iteration ids for orders flag = True current_page = 1 # database env connection = connect_to_db() cursor = get_cursor() tvas_dict = get_tva(connection) # global variables to use not destructured client_list = [] order_list = [] # only needed if you want to deleted, each entity on db # removing_orders(connection) # removing_clients(connection) # removing_addresses(connection) while flag: # this request is used to get all the orders on a while print('(+) get page {} orders'.format(current_page))
retval = get_all_topics(ALL_DBS) return jsonify({'all_topics': retval}) @app.route('/api/1.0/add_category', methods = ['POST']) def add_category(): retval = add_new_category(request.json, ALL_DBS) return retval @app.route('/api/1.0/add_topic', methods = ['POST']) def add_topic(): retval = add_new_topic(request.json, ALL_DBS) return jsonify({'return_code': retval}) @app.route('/api/1.0/<string:api_call>', methods = ['POST']) def generic_api_call(api_call): if not request.json: abort(400) param1 = request.json.get('param1', 'no param 1') param2 = request.json.get('param2', 'no param 2') retval = {'param_1': param1, 'api_call': api_call, 'param_2': param2} return jsonify(retval) if __name__ == '__main__': # debug = True makes the server restart when the Python files change. TODO: make it # depend on whether we're running locally or in production. ALL_DBS = connect_to_db() # create_playlists(ALL_DBS) app.run(debug = True)
def upload2db(path, test_project, test_job, db_uri, test_record):
    """Upload per-metric CSV files under *path* into the performance DB.

    Looks up the TestRunRecord identified by (test_project, test_job,
    test_record), then for each ``*.csv`` file in *path* parses its rows and
    creates one PreRecord_* row per data line, attached to that record.
    The first two characters of each filename select the metric type
    (e.g. 'Pc' -> CPU, 'FP' -> FPS).

    Args:
        path: directory containing the CSV files.
        test_project, test_job, test_record: names identifying the run.
        db_uri: sqlobject connection URI.
    """
    from db import TestProject, TestJob, TestRunRecord, PreRecord_CPU, PreRecord_FPS, PreRecord_JIF, PreRecord_Net, \
        PreRecord_PRI, PreRecord_PSS
    from db import connect_to_db
    from sqlobject import AND
    # Database preparation: connect and resolve the target run record.
    try:
        connect_to_db(db_uri)
        record = TestRunRecord.select(
            AND(TestJob.j.TestProject, TestRunRecord.j.TestJob, TestProject.q.Name == test_project,
                TestJob.q.Name == test_job, TestRunRecord.q.Name == test_record)).getOne()
    except:
        # raise RuntimeError(u'DataBase Ini Error:{} {} {} {}'.format(db_uri, test_project, test_job, test_record))
        raise
    from datetime import datetime

    # Converter: parses a timestamp column of the form 2020-01-02_03:04:05.678
    class mydate(object):
        def __new__(cls, str):
            return datetime.strptime(str, '%Y-%m-%d_%H:%M:%S.%f')

    # Converter: parses a percentage column ('12.5%' -> 0.125).
    class myfloat(object):
        def __new__(cls, str):
            return float(str.strip().strip('%')) / 100

    # Per-metric layout: 'l' is the first data-line index in the CSV,
    # 'c' the target table class, 'cs' the (column-name, converter) pairs
    # applied positionally to each data line.
    csv_info = {
        'Pc': {
            'l': 11,
            'c': PreRecord_CPU,
            'cs': [('T', mydate), ('V', myfloat)]
        },
        'FP': {
            'l': 11,
            'c': PreRecord_FPS,
            'cs': [('T', mydate), ('V', int)]
        },
        'Pj': {
            'l': 11,
            'c': PreRecord_JIF,
            'cs': [('T', mydate), ('V', int)]
        },
        'Pn': {
            'l': 12,
            'c': PreRecord_Net,
            # NOTE(review): both columns are named 'Transmitted' — the second
            # is presumably meant to be 'Received'; confirm against the
            # PreRecord_Net schema before changing.
            'cs': [('T', mydate), ('Transmitted', float), ('Transmitted', float)]
        },
        'Ps': {
            'l': 12,
            'c': PreRecord_PSS,
            'cs': [
                ('T', mydate),
                ('Total', int),
                ('Dalvik', int),
                ('Native', int),
            ]
        },
        'Pr': {
            'l': 12,
            'c': PreRecord_PRI,
            'cs': [
                ('T', mydate),
                ('Total', int),
                ('Dalvik', int),
                ('Native', int),
            ]
        }
    }
    for f in os.listdir(path):
        # Only CSV files; the two-letter prefix selects the metric layout.
        if f[-3:] != 'csv':
            continue
        logger.log(logging.INFO, 'Upload{}'.format(f))
        f_info = csv_info[f[:2]]
        with open(path + '/' + f, 'r') as fo:
            ff = fo.read().split('\n')
            # Line 4, column 2 holds the date prefix shared by all rows.
            ds = ff[3].split(',')[1].strip()
            for l in ff[f_info['l']:]:
                l = l.strip()
                if '' == l:
                    continue
                # Every row links back to the resolved TestRunRecord.
                ks, vs = (['TestRunRecord'], [record])
                # Prepend the file-level date so the timestamp column parses.
                css = (ds + '_' + l).split(',')
                for i in range(0, len(f_info['cs'])):
                    ks.append(f_info['cs'][i][0])
                    vs.append(f_info['cs'][i][1](css[i]))
                # Instantiating the sqlobject class persists the row.
                f_info['c'](**dict(zip(ks, vs)))