def read_stream(stream, keywords=KEYWORDS, batch_size=1000):
    """Count keyword occurrences across `batch_size` English tweets pulled
    from `stream`, then store the {keyword: count} dict (plus a timestamp)
    via insert_data into the 'keywords_frequencies' collection.
    """
    kw_freq = dict.fromkeys(keywords, 0)
    tweets = []  # leftover buffer from the disabled per-tweet storage below
    processed = 0
    while processed < batch_size:
        raw = next(stream)
        if not raw:
            continue
        tweet = json.loads(raw)
        # Only English tweets count toward the batch.
        if tweet['data']['lang'] != 'en':
            continue
        processed += 1
        for token in tweet['data']['text'].split():
            token = token.lower()
            if token in kw_freq:
                kw_freq[token] += 1
        # tweet['timestamp'] = ymdhms()
        # tweet['data']['text_tokenized'] = preprocess_text(tweet['data']['text'])
        # tweets.append(tweet)
        # print(f"read_stream {processed}/{batch_size}")
    kw_freq['timestamp'] = ymdhms()
    insert_data(kw_freq, 'keywords_frequencies', many=False)
def insert_new(tablename):
    # Build one row for `tablename` from the submitted form, insert it,
    # then re-query the new row by its primary-key value and render it.
    table = database.get_table(tablename)
    columns = table.columns.keys()
    data = {}
    for i in columns:
        if i == 'DServing' or i == 'Bdate' or i == 'BDATE':
            # Date-typed columns: default to today's date when left blank.
            temp = request.form.get(i, '')
            if temp == '':
                temp = time.strftime('%Y-%m-%d')
            temp = datetime.datetime.strptime(temp, '%Y-%m-%d')
        elif i == 'Photo':
            # File upload: read the bytes first, then fall back to the
            # default image when no file was actually chosen.
            filename = request.files[i].filename
            temp = request.files[i].read()
            if filename == '':
                temp = read_default()
        else:
            temp = request.form.get(i, '')
        data[i] = temp
    print(data, file=sys.stderr)  # debug: dump the row being inserted
    database.insert_data(tablename, data)
    # Fetch the row back using the value stored under the first PK column.
    columns, result = database.query_execute(
        tablename, data[table.primary_key.columns.keys()[0]])
    results = parse_query_result(columns, result)
    return render_template('insert.html', columns=columns, results=results)
def update_list(self):
    # Read a (letter, value) pair from the two line edits, store it in the
    # database, then clear the inputs and rebuild the scroll-area list.
    # PyQt4 / Python-2 era code (.toUtf8(), e.message).
    db,cu = database.connect_db()
    input_letter = self.letterLineEdit.text().toUtf8()
    input_letter = str(input_letter).strip()
    input_value = self.valueLineEdit.text()
    label = 'one'
    try:
        input_value = int(input_value)
        database.insert_data(input_letter,input_value,label,cu,db)
        self.letterLineEdit.clear()
        self.valueLineEdit.clear()
        try:
            # Close every widget currently shown in the scroll layout
            # before repopulating; ignore failures (e.g. empty layout).
            for i in range(self.verticalLayoutScroll.count()):
                self.verticalLayoutScroll.itemAt(i).widget().close()
        except:
            pass
        self.list_alpha(label)
    except database.DataErorr as e :
        # NOTE(review): exception class is spelled "DataErorr" in the
        # database module — confirm before renaming anywhere.
        QtGui.QMessageBox.warning(self, "Cannot store value", e.message,
                                  QtGui.QMessageBox.Cancel,
                                  QtGui.QMessageBox.NoButton,
                                  QtGui.QMessageBox.NoButton)
        return
    except Exception as foo :
        # Any other failure (e.g. non-integer value): generic warning.
        QtGui.QMessageBox.warning(self, "Cannot store value", "Please check values",
                                  QtGui.QMessageBox.Cancel,
                                  QtGui.QMessageBox.NoButton,
                                  QtGui.QMessageBox.NoButton)
        return
def get_data():
    """Fetch the most recent 5-minute kline for `tradewith` and store its
    OHLCV fields in the database.

    Fixes: on a BinanceAPIException the original only printed the error and
    then crashed with a NameError because `kline` was never assigned; we now
    return early.  Also guards against an empty result and uses `kline[-1]`
    instead of six repeated `kline[len(kline)-1]` lookups.
    """
    try:
        kline = client.get_historical_klines(tradewith,
                                             Client.KLINE_INTERVAL_5MINUTE,
                                             "1 hour ago UTC")
    except BinanceAPIException as ex:
        print(ex)
        return
    if not kline:
        # No candles returned for the requested window; nothing to insert.
        return
    # A kline row starts with [open_time, open, high, low, close, volume, ...].
    timestamp, opens, high, low, close, volume = kline[-1][:6]
    database.insert_data(timestamp, opens, high, low, close, volume)
def monitoring_UI(sec):
    """Poll the running-process list every `sec` seconds, diff each snapshot
    against the previously stored one, print started/stopped processes, and
    persist every snapshot.  Runs until interrupted with Ctrl-C."""
    try:
        db.create_database()
        comp = False          # True once a previous snapshot exists to diff against
        clist_start = []      # processes that appeared since the last snapshot
        clist_stop = []       # processes that disappeared since the last snapshot
        prev_date = 0         # timestamp key of the previously stored snapshot
        while True:
            proclist, date = pr.proc_list()
            if comp:
                # Compare and analyze against the stored previous snapshot.
                prev_list = db.load_data_by_date(prev_date)
                clist_start, clist_stop = co.compare_process(
                    proclist, prev_list)
                # Print status log of changes.
                if clist_start:
                    fp.print_list(clist_start)
                if clist_stop:
                    fp.print_list(clist_stop)
                # NOTE(review): comparison is disarmed whenever nothing
                # started and re-armed the next cycle — confirm intended.
                if not clist_start:
                    comp = False
            else:
                comp = True
            # Persist the full current process list for the next diff.
            db.insert_data(proclist)
            prev_date = date
            time.sleep(sec)
    except KeyboardInterrupt:
        sys.exit(0)
def write_into_movies_table():
    """Load movies_data.csv, reshape it to the Movies table schema, insert
    the rows, and then NULL out the link/overview columns afterwards."""
    raw = pd.read_csv("movies_data.csv")
    raw = raw[[
        "imdbId", "num_votes", "rating_mean", "rating_std", "runtimeMinutes",
        "startYear", "primaryTitle", "originalTitle"
    ]]
    renamed = raw.rename(columns={
        "imdbId": "IMDB_ID",
        "primaryTitle": "Primary_Title",
        "originalTitle": "Original_Title",
        "rating_mean": "Avg_Rating",
        "startYear": "Start_Year",
        "num_votes": "Num_Votes",
        "runtimeMinutes": "Runtime_Minutes",
    })
    # Columns filled in later by other pipelines; start them out empty.
    for blank_col in ("Trailer_Link", "Poster_Link", "Overview"):
        renamed[blank_col] = [None] * len(renamed)
    ordered = renamed[[
        "IMDB_ID", "Primary_Title", "Original_Title", "Avg_Rating",
        "Num_Votes", "Start_Year", "Runtime_Minutes", "Trailer_Link",
        "Poster_Link", "Overview"
    ]]
    # Drop rows whose runtime carries the IMDb "missing" marker.
    ordered = ordered[ordered["Runtime_Minutes"] != "\\N"]
    database.insert_data("Movies", ordered)
    database.run_sql(
        "UPDATE Movies SET Trailer_Link = NULL, Poster_Link = NULL, Overview = NULL;"
    )
def get_form():
    # Handle the "register a home" form: parse the posted fields, geocode
    # the street address via Google Maps, build a Home object and store it.
    if request.method == 'POST':
        nome = request.form['nome']
        cpf = request.form['cpf']
        tel = request.form['telefone']
        email = request.form['_email']
        vagas = int(request.form['vagas'])
        rua = request.form['rua']
        cep = request.form['cep']
        num = int(request.form['num'])
        apt = int(request.form['apt'])
        dscp = request.form['descricao']
        tipo = request.form['_type']
        gmaps = googlemaps.Client(key=api_key)
        try:
            # City/state are hard-coded: all addresses are in Rio de Janeiro.
            geocode_result = gmaps.geocode(
                str(num) + ' ' + rua + ', Rio de Janeiro, ' + 'RJ')
            #print(geocode_result[0]['geometry']['location'])
            lat = float(geocode_result[0]['geometry']['location']['lat'])
            lng = float(geocode_result[0]['geometry']['location']['lng'])
        except:
            print("Unable to get latitude and longitude from address")
            raise (ValueError)
        # NOTE(review): `tipo` is passed twice while `email`/`apt` are
        # unused — confirm against Home's constructor signature.
        h = Home(lat, lng, tipo, vagas, dscp, nome, cpf, tel, cep, rua,
                 tipo, num)
        database.insert_data(h)
        return redirect(request.url)
def main():
    """Crawl a paginated Baidu Pan share feed, extract (title, short_url)
    pairs, and insert the shares not already stored in the database."""
    root_url = "https://pan.baidu.com/s/"
    # source_url = "https://pan.baidu.com/pcloud/feed/getdynamiclist?auth_type=1&filter_types=11000&query_uk=1404980556&category=0&limit=50&start=2&bdstoken=2cef3aa5bc4fa2a5f3c29a0244e13838&channel=chunlei&clienttype=0&web=1&logid=MTU0OTUzNjg5NTc0MzAuMzc0MTY0OTE5Njk2ODM4Ng=="
    user_agent = UserAgent().chrome
    print(user_agent)
    headers = {
        "User-Agent": user_agent,
        "Cookie": "BAIDUID=B665C57519BB1CE733014F986955D850:FG=ww; PSTM=1524450639; BIDUPSID=4E742698B7CE1AA938C285FED0123F7A; PANWEB=ww; BDORZ=B490B5EBF6F3CD402E515D22BCDA1598; MCITY=-54028%3A; BDCLND=FdI5VGiLGiuP50G3DmS9Sv%2BT5puEraBA80TAnYJd7LM%3D; cflag=13%3A3; delPer=0; PSINO=2; BDUSS=0hxQWtLZVlEYW1HTjB2RXpJZmRIa3ZFdUZ1UXp1UmQ4R3NrQ0lmTXROOWVXb05jQUFBQUFBJCQAAAAAAAAAAAEAAAADU5O6eDkyMDIzMjc5NgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAF7NW1xezVtcV; STOKEN=8ee50ac349feeb50e7e416fc38f3d2662813e837d7f044ae0a58ae812988a2d9; SCRC=24a1fa6c4edc40cba18ff8924ac79c09; Hm_lvt_7a3960b6f067eb0085b7f96ff5e660b0=1549182728,1549509635,1549510597,1549520225; Hm_lvt_f5f83a6d8b15775a02760dc5f490bc47=1549509786,1549510604,1549520259; BDRCVFR[feWj1Vr5u3D]=I67x6TjHwwYf0; BDRCVFR[M7pOaqtZgJR]=I67x6TjHwwYf0; H_PS_PSSID=1445_21090_28328_28413; Hm_lpvt_f5f83a6d8b15775a02760dc5f490bc47=1549537336; recommendTime=android2019-ww-31%2017%3A00%3A00; Hm_lpvt_7a3960b6f067eb0085b7f96ff5e660b0=1549537453; PANPSC=13191783871572786376%3AnSvEWpGhaFom0wdWVT9TBXiC6e0Ll37p5Qm%2FyHlw2fqYXvTamKXvPHgXofrWESI4fJ06i7Fpl5f%2FHSZRhh6%2BPa1Sr93PEHDiDv6sF2krPB6pWetz1AjD6SeKHrlyURG2vIcMCeE%2BEzPl0236xFtTr1QxIuFgE8LFzlMymqcoIUw%3D",
        "Connection": "keep-alive"
    }
    result_url_list = database.select_all_complete_url()
    # Get the url list, then loop over each url to search for data.
    result_source_url_list = source_url_list.cons_source_url_list()
    for source_url in result_source_url_list:
        res = requests.get(source_url, headers=headers)
        # print(res.status_code)
        # print(res.content)
        # SECURITY(review): eval() on a network response can execute
        # arbitrary server-supplied code — json.loads() would be safer.
        result_dict = eval(str(res.content.decode()))
        if (result_dict.get("errno") == 2):
            # errno 2 marks the end of the paginated feed.
            print("~~~~~~~~~~end~~~~~~~~~~~~~~")
            break
        result_list = result_dict.get("records")
        # print(result_list)
        for each_file in result_list:
            # Strip characters that would break later processing/queries.
            title = each_file.get("title").replace("//", "/").replace(
                "'", "").replace("‘", "")
            short_url = each_file.get("shorturl")
            print(title)
            file_list = each_file.get("filelist")
            file_name_list = []
            if file_list:
                for each_file1 in file_list:
                    file_name = each_file1.get("server_filename").replace(
                        "//", "/").replace("'", "").replace("‘", "")
                    file_name_list.append(file_name)
                # Append the cleaned file names to the share title.
                title = title + (str(file_name_list)).replace("'", "").replace(
                    "‘", "")
            if len(title) > 220:
                # Keep titles within the database column limit.
                title = title[:210]
            # print(short_url)
            # complete_url = "https://pan.baidu.com/s/" + short_url
            if short_url not in result_url_list:
                database.insert_data(title, short_url)
                print(title)
        import time
        # Throttle requests between feed pages.
        time.sleep(5)
    database.close_db()
def fill_database():
    """Backfill the database with every 5-minute kline for `tradewith`
    since 1 Jun 2019.

    Fixes: on a BinanceAPIException the original printed the error and then
    crashed with a NameError because `klines` was never assigned; we now
    return early after logging.
    """
    try:
        klines = client.get_historical_klines(tradewith,
                                              Client.KLINE_INTERVAL_5MINUTE,
                                              "1 Jun, 2019")
    except BinanceAPIException as ex:
        print(ex)
        return
    # Kline layout: [open_time, open, high, low, close, volume, ...]
    for kline in klines:
        database.insert_data(kline[0], kline[1], kline[2], kline[3],
                             kline[4], kline[5])
    print("database filled")
def add_to_favorites(favorite_movie):
    """Add `favorite_movie` to the user's favorites (POST only), flash a
    status message, and redirect back to the referring page.

    Fixes: the error path previously fell through and returned None (an
    invalid Flask response); the bare `except:` is narrowed to Exception;
    and the user-facing typo "againg" is corrected.
    """
    if request.method == "POST":
        try:
            favorite_list = show_favorite()
            if favorite_movie in favorite_list:
                flash("You already added this movie", "alert-danger")
            else:
                insert_data(favorite_movie)
                flash("Successfully added!", "alert-success")
            return redirect(request.referrer)
        except Exception:
            flash("There was an error try again", "alert-danger")
            return redirect(request.referrer)
def read_json(depth_dict, time_ymd, time_hms, file_order, ask_bid,
              print_savedb, db):
    """Walk one side ('ask'/'bid') of an order-book snapshot and, per
    depth level, either print it or persist it depending on `print_savedb`.
    Any other mode prints an error and stops."""
    for idx, level in enumerate(depth_dict[ask_bid]):
        if print_savedb == "print":
            print(idx)
            print(level)
        elif print_savedb == "savedb":
            # One committed row per depth level, keyed by date/time/order/
            # side/index plus the level's "a" and "b" fields.
            database.insert_data(db, time_ymd, time_hms, file_order,
                                 ask_bid, idx, level["a"], level["b"])
            db.commit()
        else:
            print("ERROR - read_json()")
            break
def join(): if request.method == 'GET': return render_template('join.html') else: id = request.form['id'] pw = request.form['pw'] name = request.form['name'] ret = database.check_id(id) if ret == None: database.insert_data(id, pw, name) return ''' <script> alert("안녕하세요, {}님. 가입을 환영합니다."); location.href='/login'; </script> '''.format(name) else: return '''
def monitoring_UI(self, sec):
    """Monitor running processes every `sec` seconds until `self.loop` is
    cleared: diff against the previous snapshot, log and store started and
    stopped processes, and persist every snapshot."""
    #Monitoring Function
    import proc as pr
    import compare as co
    import fprint as fp
    import time
    import database as db
    # Create MySql databases if they do not exist.
    db.create_database()
    db.create_database_start()
    db.create_database_stop()
    comp = False          # True once a previous snapshot exists to diff against
    clist_start = []      # processes that appeared since the last snapshot
    clist_stop = []       # processes that disappeared since the last snapshot
    prev_date = 0         # timestamp key of the previously stored snapshot
    self.loop = True
    while self.loop:
        # List all running processes.
        proclist, date = pr.proc_list()
        if comp:
            # Compare and analyze against the stored previous snapshot.
            prev_list = db.load_data_by_date(prev_date)
            clist_start, clist_stop = co.compare_process(
                proclist, prev_list)
            # Print status log - to the screen and to StatusLog.txt.
            if clist_start:
                fp.print_list(clist_start)
                db.insert_data_start(clist_start)
                '''for item in clist_start[:-2]: self.status.addItem(str(item))'''
            if clist_stop:
                fp.print_list(clist_stop)
                db.insert_data_stop(clist_stop)
                '''for item in clist_stop[:-2]: self.status.addItem(str(item))'''
            # NOTE(review): comparison is disarmed whenever nothing started
            # and re-armed the next cycle — confirm intended.
            if not clist_start:
                comp = False
        else:
            comp = True
        # Insert all running processes to the database and sleep.
        db.insert_data(proclist)
        prev_date = date
        time.sleep(sec)
def phonebook_insert():
    """On POST, store the submitted contact and report the result; on any
    other method, render the insert form."""
    if request.method != 'POST':
        return render_template('PhoneBook_Insert.html')
    name = request.form.get('name')
    telno = request.form.get('telno')
    address = request.form.get('address')
    is_success = database.insert_data(name, telno, address)
    return is_success + "<br> <a href=\'/phonebook\'>Go To Main Page!</a>"
def submit_query(update_query, select_query, num_of_rows, user_id, app, db_name):
    """Record a newly submitted query pair in the tracking table (status
    SUBMITTED, dated today) and return the insert result."""
    print("Received the request", update_query)
    submitted_on = datetime.now().strftime('%Y-%m-%d')
    params = (update_query, select_query, num_of_rows, user_id, app,
              db_name, submitted_on, 'SUBMITTED')
    result = db.insert_data('sqlite', CONN,
                            (q.query_list['insert_query'], params))
    print("The result is", result)
    return result
def GetMessage(service, user_id, msg_id, count):
    """Get a Message with given ID and store selected headers in the database.

    Args:
      service: Authorized Gmail API service instance.
      user_id: User's email address. The special value "me" can be used to
        indicate the authenticated user.
      msg_id: The ID of the Message required.
      count: Sequence number stored alongside the row.

    Returns:
      A Message, or None if the API call raised an HttpError.
    """
    label_tags = ['From', 'Date', 'Subject', 'To']
    try:
        message = service.users().messages().get(userId=user_id,
                                                 id=msg_id).execute()
        row_info = []
        # Collect only the headers of interest, in payload order; 'Date'
        # values are parsed into datetime objects.
        for i in message['payload']['headers']:
            if i["name"] in label_tags:
                # print(i["name"]," ",i["value"])
                if i["name"] == "Date":
                    row_info.append(parse(i["value"]))
                else:
                    row_info.append(i["value"])
        # row_info[1]=parse(row_info[1])
        # print(row_info[1])
        # Disabled branch: fetch the raw body, decode it and store the
        # stripped text instead of just the ids.
        # message = service.users().messages().get(userId=user_id, id=msg_id,
        # format='raw').execute()
        # msg_str = base64.urlsafe_b64decode(message['raw'].encode('ASCII'))
        # html_body = email.message_from_string(msg_str.decode('utf-8'))
        # text = RemoveTags(str(html_body))
        # # print(text)
        # row_info.extend((text,count))
        row_info.extend((msg_id, count))
        database.insert_data(tuple(row_info))
        # print(message)
        return message
    except errors.HttpError as error:
        print('An error occurred: %s' % error)
def home():
    """Dispatch the weather page on the `btn` query parameter:
    查询 (query) / 帮助 (help) / 历史 (history) / 更新 (update).

    Fix: the result format string ended in "{}/n" — a typo for the newline
    escape "\\n" — which rendered a literal "/n" to the user.
    """
    city = request.args.get("city")
    if request.args.get('btn') == "查询":
        try:
            # Serve from the local cache first.
            result = retrieve_data(city)
            return render_template('weather.html', result=result)
        except TypeError:
            # Cache miss: fetch live weather, store it, then render.
            try:
                location, weather, temper, day = fetchWeather(city)
                insert_data(location, weather, temper, day)
                result = ''' 地点: {}, 天气: {}, 温度: {}, 更新时间: {}\n'''.format(
                    location, weather, temper, day)
                return render_template('weather.html', result=result)
            except KeyError:
                result = "Invalid command. Type 'help' to check the available commands."
                return render_template('404.html', result=result)
    elif request.args.get('btn') == "帮助":
        return render_template('help.html')
    elif request.args.get('btn') == "历史":
        history = get_history()
        return render_template('history.html', result=history)
    elif request.args.get('btn') == "更新":
        try:
            # Expected input form: "<location> <weather>", e.g. "北京 多云".
            location, weather = city.split(' ')
            weather_list = ["晴", "小雨", "大雨", "阴天", "大雪", "中雪", "小雪"]
            if weather in weather_list:
                update(location, weather)
                return render_template('update.html')
            else:
                result = "请正确输入信息,例如:[北京 多云]"
                return render_template('404.html', result=result)
        except ValueError:
            result = "Invalid command. Type 'help' to check the available commands."
            return render_template('404.html', result=result)
    else:
        result = "Invalid command. Type 'help' to check the available commands."
        return render_template('404.html', result=result)
def addProduc():
    """Collect the five entry-widget values (e6 first, then e1..e4) and
    insert them as one record, confirming with a message box on success."""
    record = (e6.get(), e1.get(), e2.get(), e3.get(), e4.get())
    res = database.insert_data(*record)
    if res:
        messagebox.showinfo("Information", "Record Inserted")
def api_upload():
    """Accept an uploaded photo, run it through the neural network, store
    (name, timestamp, result) in the database, and render the result page.
    Any failure renders the error page with the exception text.

    Fix: the timestamp format was "%Y%m%d%H%m", repeating the month (%m)
    where the minutes (%M) were clearly intended per the comment
    "get the upload time".
    """
    try:
        f = request.files['photo']  # the uploaded picture
        result = test.Test(f)  # run the picture through the network
        final = result.work()  # recognition result
        name = request.form.get("name")  # document's name
        if f and allowed_file(f.filename):
            now = datetime.datetime.now()
            # Upload time as a string: year month day hour MINUTE.
            otherStyleTime = now.strftime("%Y%m%d%H%M")
            database.insert_data(name, otherStyleTime, str(final))
            database.test()  # could be used to check data in the table.
        # Prepare the parameters for the webpage.
        para = {'result': str(final), "name": name}
        return render_template("template.html", para=para)
    except Exception as e:
        error = {'reason': str(e)}  # print error track
        return render_template('errorpage.html', error=error)
def into_database(self, ut=None):
    """
    De-duplicate URL(s) against the database, then store the new ones.
    :param ut: url and title -- a str URL, a {'URL': ...} dict, or a list
               of such dicts; anything else prints an error
    :return: None
    """
    print 'into_database'
    if ut:
        if isinstance(ut, str):
            # NOTE(review): this branch marks the row with key 'READ',
            # while the dict/list branches use 'Read' -- confirm which
            # spelling the schema actually expects.
            temp={'URL':ut, 'READ':'NO'}
            if not db.is_url_exist(self.connect, ut):
                db.insert_data(self.connect, temp)
        elif isinstance(ut, dict):
            ut['Read'] = 'NO'
            if not db.is_url_exist(self.connect, ut['URL']):
                db.insert_data(self.connect, ut)
        elif isinstance(ut, list):
            for tmp in ut:
                tmp['Read'] = 'NO'
                if not db.is_url_exist(self.connect, tmp['URL']):
                    db.insert_data(self.connect, tmp)
                else:
                    # Already stored; skip this entry (no-op else).
                    continue
        else:
            print "ERROR TYPE"
def post_location():
    """Store a pin/address/coordinate record posted from the form, then
    redirect to 'success' on insert or 'pin_exist' when the key is taken."""
    form = request.form
    res = insert_data(key=form['pin'],
                      place_name=form['address'],
                      admin_name1=form['city'],
                      latitude=float(form['lat']),
                      longitude=float(form['long']))
    target = 'success' if res else 'pin_exist'
    return redirect(url_for(target))
def submit_query(template_query,name,app,db_name,rows):
    # Parse a templated query into table / column list / WHERE conditions
    # using the module-level regexes, then record the template in the
    # tracking table and return the insert result.
    query_match_group = pattern_template.match(template_query)
    print ("The match group",query_match_group)
    # NOTE(review): no None-check -- a template that doesn't match the
    # pattern raises AttributeError on the .group() calls below.
    table_group = query_match_group.group('tablename')
    column_group = query_match_group.group('columnnames')
    where_group = query_match_group.group('conditions')
    #Get Columns
    columns = []
    for column_value in column_group.split(','):
        column_match_group = column_value_pattern.match(column_value)
        columns.append(column_match_group.group('column').strip())
    where = []
    #for conditions in where_group:
    print (repr(where_group))
    all_columns = where_pattern.findall(where_group)
    where = all_columns
    # Store the template plus stringified column/condition lists.
    # (table_group is parsed but currently unused.)
    query = (q.query_list['insert_query_template'],(template_query,str(columns),str(where),name,db_name,app,rows,))
    result = db.insert_data('sqlite',CONN,query)
    print (result)
    return result
def new_data():
    """Record a temperature observation supplied via query parameters.

    Location comes either from an explicit 'point' or is extrapolated from
    a 'place' string; the observer is upserted before the reading is stored.
    Returns a human-readable status string."""
    args = request.args
    point = args.get('point', None)
    place = args.get('place', None)
    note = ""
    observer = args.get('observer', 'anon')
    obs_type = args.get('obs_type', 'REST')
    temp = args.get('temp')
    if not utilities.is_float(temp):
        return "No valid temperature provided"
    if point is None and place is not None:
        # Derive coordinates from the place description.
        point = utilities.coord_from_str(place)
        note = f"point extrapolated from: {place}"
    if point is None:
        return "No location information provided"
    if isinstance(point, str) and place is None:
        # Explicit "lat,lng" string: keep only the first two components.
        parts = point.split(',')
        point = [parts[0], parts[1]]
    if point is not None:
        obs = db.upsert_observer(DB_CON, observer, obs_type, "")
        row = db.insert_data(DB_CON, obs, db.format_point(point), temp, note)
        return f"Successfully recorded temperate observation #{row[0]}"
    return "Unexpected parameter configuration, ignoring input"
def save_price(self):
    """Continuously poll the order book and persist each snapshot,
    printing a success/failure line with the time spent per tick."""
    create_db()
    create_table()
    while True:
        start = time.time()
        book = self._get_orderbook(self.market_instance)
        fetched_at = time.time()
        if book != False:
            ok = insert_data(book, self.market, self.symbol,
                             fetched_at - start)
            stamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
            elapsed = time.time() - start
            if ok:
                print('Get tick at', stamp, 'CPU Time:', elapsed)
            else:
                print('Fail to get tick at', stamp, 'CPU Time:', elapsed)
rate_cnt += 1 # Pulses per time # tot_cnt += 1 # Total pulses try: input_value = GPIO.input(pin_number) #sys.stdout.write(str(input_value)), except KeyboardInterrupt: #Look for exit command print('\nCTRL C - Exiting nicely') GPIO.cleanup() sys.exit() minutes += 1 value = rate_cnt * constant print('\nLitres / min', round(value, 4)) #print('\nTotal Litres', round(tot_cnt * constant, 4)) print('\nTime (min & clock)', minutes) # raw_input("Press the <ENTER> key to continue...") GPIO.cleanup() if (database.insert_data({ 'sensor_id': 'WATERFLOW_' + util.getserial(), 'sensor_name': 'water flow 0', 'sensor_type': 'YFS201', 'value': value })): print('Water flow inserted successfully') #print(input_value) # output to pin 12 #GPIO.output(12, GPIO.HIGH)
def dump():
    """Forever: sample temperature/humidity, store the pair, then wait two
    minutes before the next reading."""
    while True:
        temperature, humidity = get_values()
        insert_data(temperature, humidity)
        sleep(120)
        # Note: this confirmation prints only after the 2-minute wait.
        print('Data inserted into database')
from scrape import scrape_info
from database import connect_database, insert_data, create_table, get_all_data

# Script entry point: scrape one YouTube video's info, persist it, then
# display everything stored so far.
if __name__ == "__main__":
    # URL of the video to scrape.
    url = "https://www.youtube.com/watch?v=Yw4rkaTc0f8"
    # Make sure the destination table exists before writing.
    create_table()
    video_info = scrape_info(url)
    insert_data(video_info)
    # Display all stored data.
    get_all_data()
def run(city):
    """Fetch current weather data for `city` and store it."""
    weather_data = api_call.get_weather_data(f'{city}')
    # Unpack the first row of values as positional insert arguments
    # (equivalent to the original list-comprehension splat).
    database.insert_data(*weather_data.values[0])
global sensor1, sensor2, sensor3, sensor4 logger.debug('Sending signal to request sensor data.') # When the Arduino receives the character 'z' it will read the sensor data. ser.write(b'z') try: data = ser.readline().decode('utf-8') data = data.split() sensor1 = data[0] sensor2 = data[1] sensor3 = data[2] sensor4 = data[3] return (sensor1, sensor2, sensor3, sensor4) except Exception as e: logger.exception(e) return while True: # Get the current date and time from the datetime module. date_time = datetime.now().strftime('%Y/%m/%d %H:%M:%S') read_sensors() # Save data in a Postgresql database. insert_data(date_time, sensor1, sensor2, sensor3, sensor4) # Set an interval by seconds. time.sleep(600) print(date_time) ser.close()
def sendNewMeme():
    """Persist a meme's name and description taken from the query string,
    then return a trivial acknowledgement body."""
    meme_name = str(request.args.get('name'))
    meme_description = str(request.args.get('description'))
    database.insert_data(meme_name, meme_description)
    return "hi"