def process_data(self, raw_data):
    """Persist one raw tweet: log it, strip quotes, store it, then derive
    and save its location and text artifacts."""
    print(raw_data)
    # Single quotes are blanked out before the DB insert (crude escaping).
    sanitized = raw_data.replace("'", " ")
    db.insert_to_tweet(sanitized)
    self.save_location(location=self.location_draw(sanitized))
    self.save_txt(sanitized)
def on_url_clicked(self, widget, url, event):
    """Handle a click on a task link.

    Three modes: if we are currently picking a parent (``selectparent``)
    or a child (``selectchild``) for the current task, toggle that
    dependency and re-validate both ends; otherwise select the clicked
    task and load its data into the property widgets.
    """
    if self.selectparent and self.tid is not None:
        # Clicked task becomes a parent of the current task.
        DBConnection.toggle_dependency(get_task_id(url), self.tid)
        self.check_dependency(get_task_id(url))
        self.check_dependency(self.tid)
        self.selectparent = False
        self.refresh_view()
    elif self.selectchild and self.tid is not None:
        # Clicked task becomes a child of the current task.
        DBConnection.toggle_dependency(self.tid, get_task_id(url))
        self.selectchild = False
        self.check_dependency(self.tid)
        self.check_dependency(get_task_id(url))
        self.refresh_view()
    else:
        # Plain click: make the clicked task current and populate the form.
        self.pid = get_project_id(url)
        self.tid = get_task_id(url)
        self.aid = None
        taskdata = DBConnection.get_data("task", self.tid)
        self.TaskNameEntry.set_text(taskdata[1])
        # taskdata[3] holds the status; select the matching combo row.
        for index, status in enumerate(statuslist):
            if taskdata[3] == status:
                self.TaskStatusCombo.set_active(index)
        self.TaskDueDateEntry.set_text(taskdata[4])
        self.refresh_actionlist()
        self.set_interface_lock(False)
def parseVideoName():
    """Scan Videos/ for .avi files, run each through the pipeline and push
    the resulting CSVs to the database.

    The video filename is expected to be underscore-separated:
    ``..._<shoppingCenter>_<store>_<date>_<HH-MM-SS>...`` — TODO confirm
    against the actual recorder naming scheme.

    Fix: the inner results loop reused the outer loop variable ``file``
    (which also shadows the py2 builtin); both loop variables renamed.
    """
    for video_file in os.listdir("Videos/"):
        if not video_file.endswith(".avi"):
            continue
        videoName = os.path.join("../modelPipeline/Videos/", video_file)
        videoProperties = videoName.split("_")
        # OJOOO: frame count is hard-coded for now.
        videoNFrames = 5
        shoppingCenter = videoProperties[2]
        store = videoProperties[3]
        videoDate = videoProperties[4]
        videoStart = videoProperties[5]
        # Parse the start time and anchor it to today's date.
        videoStart = datetime.strptime(videoStart, '%H-%M-%S').time()
        videoStart = datetime.combine(datetime.today(), videoStart)
        # Creation (and prefixing) of the directory where the csv is stored.
        storeDir = createSubFolders(shoppingCenter, store)
        storeDir = "../modelPipeline/" + storeDir
        # Run the model on the video (currently disabled).
        #callModel(videoName)
        # Resolve the shopping-center and store ids.
        shoppingID, shopID = DBConnection.GetIDS(shoppingCenter, store)
        # Post-process every result csv from the vision stage and upload it.
        for result_file in os.listdir("../computer_vision/"):
            if result_file.endswith(".csv"):
                csvName = "../computer_vision/" + result_file
                csvDir = AlterCSV.mainAlterCSV(csvName, shopID, shoppingID,
                                               storeDir, videoStart,
                                               videoNFrames, videoDate)
                DBConnection.SendToDB(csvDir)
def analyzeData(hashtag):
    """Stream up to 50 English tweets matching `hashtag`, classify each as
    troll/not-troll, and record the user and the tweet in the database.
    """
    count = 50
    for tweet in tweepy.Cursor(t.search, q=hashtag).items():
        if isEnglish(tweet.text):
            try:
                # Sentiment classifier decides whether the tweet is trollish.
                isTroll = txtAnlyze.sentiment(tweet.text)
                print(isTroll)
                if db.isExistingUser(tweet.user.screen_name) and isTroll:
                    # Known troll: bump their counter.
                    db.updateUser(tweet.user.screen_name)
                elif (not db.isExistingUser(
                        tweet.user.screen_name)) and isTroll:
                    # New user with a trollish tweet: start them at 1.
                    db.insertUser(tweet.user.screen_name, 1)
                else:
                    # NOTE(review): this branch also runs for users that
                    # already exist (the non-troll case) — presumably
                    # insertUser tolerates duplicates; confirm.
                    print("Insert")
                    db.insertUser(tweet.user.screen_name)
                # Tweet text is truncated to 50 chars for storage.
                db.insertTweet(tweet.id, tweet.user.screen_name,
                               tweet.text[:50], tweet.created_at, isTroll)
                # Only English tweets count toward the 50-tweet budget.
                count -= 1
                if count == 0:
                    return
            except tweepy.TweepError as e:
                print(e.reason)
            except StopIteration:
                return
def index():
    """Render the landing page with per-continent show, artist and genre
    recommendations; any failure falls back to the server-error page.
    """
    try:
        # Top-3 cities per continent by number of events.
        recommend_shows_list = DBConnection.execute_query(
            queries.get_top_3_city_per_continent_by_event_number())
        recommend_shows_africa = _get_recommended_shows(
            recommend_shows_list, 'Africa')
        recommend_shows_asia = _get_recommended_shows(recommend_shows_list,
                                                      'Asia')
        recommend_shows_europe = _get_recommended_shows(
            recommend_shows_list, 'Europe')
        recommend_shows_north_america = _get_recommended_shows(
            recommend_shows_list, 'North America')
        recommend_shows_south_america = _get_recommended_shows(
            recommend_shows_list, 'South America')
        # NOTE(review): the variable says "north_oceania" but the filter is
        # plain 'Oceania' (same naming quirk below for artists).
        recommend_shows_north_oceania = _get_recommended_shows(
            recommend_shows_list, 'Oceania')
        # Best/worst cities per continent by artist followers feed the
        # genre recommendations.
        recommend_artists_list = DBConnection.execute_query(
            queries.get_top_3_city_per_continent_by_artist_followers())
        worse_artists_list = DBConnection.execute_query(
            queries.get_last_2_city_per_continent_by_artist_followers())
        recommend_artists_africa = _get_recommended_genres(
            recommend_artists_list, worse_artists_list, 'Africa')
        recommend_artists_asia = _get_recommended_genres(
            recommend_artists_list, worse_artists_list, 'Asia')
        recommend_artists_europe = _get_recommended_genres(
            recommend_artists_list, worse_artists_list, 'Europe')
        recommend_artists_north_america = _get_recommended_genres(
            recommend_artists_list, worse_artists_list, 'North America')
        recommend_artists_south_america = _get_recommended_genres(
            recommend_artists_list, worse_artists_list, 'South America')
        recommend_artists_north_oceania = _get_recommended_genres(
            recommend_artists_list, worse_artists_list, 'Oceania')
        # Best city for each main genre, rendered as an HTML table.
        city_genre_recommend = DBConnection.execute_query(
            queries.get_best_city_per_main_genre())
        for record in city_genre_recommend:
            record['genre'] = record['genre'].title()
        recommend_city_genre = RecommendGenreTable(city_genre_recommend,
                                                   classes=['table'])
        return render_template(
            'index.html',
            recommend_shows_africa=recommend_shows_africa,
            recommend_shows_asia=recommend_shows_asia,
            recommend_shows_europe=recommend_shows_europe,
            recommend_shows_north_america=recommend_shows_north_america,
            recommend_shows_south_america=recommend_shows_south_america,
            recommend_shows_north_oceania=recommend_shows_north_oceania,
            recommend_artists_africa=recommend_artists_africa,
            recommend_artists_asia=recommend_artists_asia,
            recommend_artists_europe=recommend_artists_europe,
            recommend_artists_north_america=recommend_artists_north_america,
            recommend_artists_south_america=recommend_artists_south_america,
            recommend_artists_north_oceania=recommend_artists_north_oceania,
            recommend_city_genre=recommend_city_genre)
    except Exception as e:
        return render_template('server_error.html', error_str=str(e))
def on_status_change_task(self, combobox):
    """Write the newly selected status back to the current task, then
    cascade it to dependent tasks and redraw."""
    if self.tid is None:
        return
    current = DBConnection.get_data("task", self.tid)
    chosen = get_active_text(combobox)
    # Only touch the DB when the status actually changed.
    if current[3] != chosen:
        DBConnection.update_table(
            "task",
            "status = '%(status)s'" % {"status": chosen},
            self.tid)
        self.cascade(self.tid)
        self.refresh_view()
def setup_db(self, db_path=''):
    """Open the image database; empty path means the default connection."""
    self.db_path = db_path
    if db_path == "":
        self.db = DBConnection.connect()
    else:
        self.db = DBConnection.connect_db(db_path)
    # Both branches expose the same collection handle.
    self.images = self.get_collection()
    pprint(self.images)
def __init__(self):
    """Open a DB connection and run the post-block evolution computation."""
    # Verbose output disabled by default.
    self.debug = False
    self.dbc = DBConnection()
    # creates the two tables that hold the differences from one version to
    # another (disabled: tables presumably already exist — confirm)
    #self.dbc.create_evolution_tables()
    # calculates and writes the evolutionary steps of the
    # post blocks into postblockevolution
    self.postblock_evolution()
def cascade(self, tid, trace=None):
    """Recursively propagate status changes from task `tid` to its children.

    A child on ``hold`` is moved to ``released`` when this parent reaches
    the ``release_trigger`` status and *all* of the child's parents are
    ``done``; conversely a child is pushed back to ``hold`` when the
    parent leaves the trigger status.  ``trace`` prevents revisiting tasks
    (guards against cycles in the dependency graph).
    """
    if trace is None:
        trace = []
    if tid not in trace:
        trace.append(tid)
        for child in DBConnection.get_childs(tid):
            parent_status = DBConnection.get_task_status(tid)
            child_status = DBConnection.get_task_status(child)
            if child_status == hold and parent_status == release_trigger:
                # Release only if every parent of the child is done.
                valid_update = True
                # NOTE(review): the loop variable is fed back into
                # get_task_status, so get_status_all_parents presumably
                # yields parent task ids, not statuses — confirm.
                for status in DBConnection.get_status_all_parents(child):
                    if DBConnection.get_task_status(status) != done:
                        valid_update = False
                if valid_update:
                    DBConnection.update_table(
                        "task",
                        "status = '%(status)s'" % {"status": released},
                        child)
                    self.cascade(child, trace)
            elif child_status not in (
                    ignore, done) and parent_status != release_trigger:
                # Parent no longer triggering: push the child back to hold.
                DBConnection.update_table(
                    "task",
                    "status = '%(status)s'" % {"status": hold},
                    child)
                self.cascade(child, trace)
def UpdateBuildTable(db_name):
    """Refresh existing rows of the build table from BuildConfig.config."""
    curs = DBConnection(db_name).curs
    for build_name, v in BuildConfig.config.items():
        # Only update builds that already have a row in the table.
        result = curs.execute(
            "SELECT * from build where build_name='%s';" % build_name)
        if result != 0:
            curs.execute("UPDATE build set hostname='%s', os='%s', 64bit=%d, compiler='%s', debug=%d, optimized=%d, static=%d, minimum=%d where build_name='%s';" % (v[0], v[1], v[2], v[3], v[4], v[5], v[6], v[7], build_name))
def remove_task(self, widget):
    """Delete the current task (after confirmation) and fix up the UI."""
    taskdata = DBConnection.get_data("task", self.tid)
    prompt = ("Are you sure you want to delete '%(taskname)s'? All dependencies and actions for this task will be removed."
              % {"taskname": taskdata[1]})
    if not dialog.show_confirm_dialog(prompt):
        return
    DBConnection.remove_task(self.tid)
    # Children may have lost their last parent; re-check each one.
    for child in DBConnection.get_childs(self.tid):
        self.check_dependency(child)
    self.refresh_view()
    self.clear_actions()
    self.clear_task_properties()
def on_status_change_project(self, combobox):
    """Persist a project status change and redraw the project list."""
    if self.pid is None:
        return
    projectdata = DBConnection.get_data("project", self.pid)
    newstatus = get_active_text(combobox)
    if projectdata[2] == newstatus:
        return
    DBConnection.update_table(
        "project",
        "status = '%(status)s'" % {"status": newstatus},
        self.pid)
    # Block the combo handler so the refresh doesn't re-trigger us.
    self.ProjectStatusCombo.handler_block(self.handlerid)
    self.refresh_projects(
        self.get_selection_strings(self.tagtree.get_selection()))
    self.ProjectStatusCombo.handler_unblock(self.handlerid)
def remove_project(self, widget):
    """Delete the current project (after confirmation) and reset the UI."""
    if self.pid is None:
        return
    projectdata = DBConnection.get_data("project", self.pid)
    prompt = ("Are you sure you want to delete '%(projectname)s'? All items within the project will be removed."
              % {"projectname": projectdata[1]})
    if dialog.show_confirm_dialog(prompt):
        DBConnection.remove_project(self.pid)
        selection = self.tagtree.get_selection()
        self.on_tagtreeview_selection_changed(selection)
        self.refresh_tags()
        self.clear_actions()
        self.clear_task_properties()
def get_ingredients():
    """Return a JSON object listing every known ingredient name."""
    all_ingredients = DBConnection.execute_query(queries.all_ingredient_names)
    names = [entry['name'] for entry in json.loads(all_ingredients)]
    return json.dumps({"ingredients": names})
def get_glass_categories():
    """Return the known glass types as JSON."""
    raw = DBConnection.execute_query(queries.get_glasses_types())
    types = [entry['glass_type'] for entry in json.loads(raw)]
    return json.dumps({"glass_types": types})
def loadPlayerStatsDataFrom2011():
    """Load the 2011-12 player stats CSV into PLAYER_STATS_2011_12.

    NOTE(review): the original body was syntactically broken (unterminated
    INSERT string, undefined cursor, no values supplied) and printed the
    wrong table name.  Rebuilt to follow the pattern of the other load*
    helpers; the column list matches the schema declared in
    createPlayerStatsFrom2011To12 (Name,Season,Age,Tm,Pos).
    """
    conn = DBConnection.getConnection()
    # TODO(review): confirm the CSV filename and header names.
    path = currentpath + '/data/player_stats_2011_12.csv'
    infile = readCSVFile(path)
    to_db = [(row['Name'], row['Season'], row['Age'], row['Tm'], row['Pos'])
             for row in infile]
    cur = conn.cursor()
    cur.executemany("INSERT INTO PLAYER_STATS_2011_12 (Name,Season,Age,Tm,Pos) VALUES (?,?,?,?,?);", to_db)
    conn.commit()
    print('Load PLAYER_STATS_2011_12 done.')
    conn.close()
    return
def StartRange(self, logic, mss_code):
    """Return the row index to start writing at for `mss_code` on
    self.searchDate.

    Rows start at 2; when prior data exists, the stored max(created_time)
    (apparently a row offset — TODO confirm) plus 2 is returned.  Only
    runs for logic 1 or 2; otherwise returns None implicitly.
    """
    if logic == 1 or logic == 2:
        db = DBConnection.DBCon()
        # SECURITY(review): credentials are hard-coded here; move them to
        # configuration.
        db.setAll('localhost', 'root', '123123', 'ReallyFine', 'utf8')
        conn = db.ConnDB()
        cur = conn.cursor()
        startIDX = 2
        # NOTE(review): SQL built by string concatenation — acceptable only
        # if searchDate/mss_code are trusted; prefer a parameterized query.
        sql = "select max(created_time) from public_data_pm25 where created_date = '"
        sql += self.searchDate + "' and mss_code = '" + str(mss_code) + "'"
        cur.execute(sql)
        data = cur.fetchone()[0]
        if data is None:
            # No prior rows for this date/sensor: start right after header.
            cur.close()
            conn.close()
            return startIDX
        else:
            startIDX = data + 2
            cur.close()
            conn.close()
            return startIDX
def fill_project_properties(self):
    """Populate the project property widgets from the DB record."""
    projectdata = DBConnection.get_data("project", self.pid)
    self.ProjectNameEntry.set_text(projectdata[1])
    # Select the combo row matching the stored status.
    for idx, status in enumerate(statuslist):
        if status == projectdata[2]:
            self.ProjectStatusCombo.set_active(idx)
    self.ProjectPriorityEntry.set_text(str(projectdata[3]))
def results():
    """Render the results page for the concerts the user ticked.

    GET shows the submission form; POST reads the ticked checkbox ids,
    pairs them with artist ids via the module-level `matching_concerts`,
    and renders a summary table (or a message when nothing was chosen).
    """
    try:
        if request.method == "GET":
            return render_template('submit_form.html')
        else:
            checkbox_values = request.form.getlist('input_checkbox')
            if not checkbox_values:
                shown_chosen_concerts = "No concerts were chosen"
            else:
                event_artist = []
                checkbox_values = [int(event_id) for event_id in checkbox_values]
                # Pair each chosen event with its artist id.
                for event_id in checkbox_values:
                    for concert in matching_concerts:
                        if event_id == concert['id']:
                            event_artist.append((event_id, concert['artist_id']))
                # NOTE(review): the dummy pair presumably keeps the SQL
                # IN-tuple from degenerating to a one-element tuple —
                # confirm against query_get_summary.
                if len(event_artist) == 1:
                    event_artist.append((-1, -1))
                chosen_concerts = DBConnection.execute_query(
                    queries.query_get_summary(tuple(event_artist)))
                _format_displayed_result_concerts(chosen_concerts)
                shown_chosen_concerts = ResultsConcertsTable(chosen_concerts,
                                                             classes=['table'])
            return render_template('results.html',
                                   chosen_concerts=shown_chosen_concerts)
    except Exception as e:
        return render_template('server_error.html', error_str=str(e))
def show_action_dialog(tid, aid=None):
    """Show the modal Add/Edit Action dialog for task `tid`.

    When `aid` is None a new action is created on accept; otherwise the
    existing action `aid` is renamed and re-attached to `tid`.  Returns
    True if the user accepted the dialog, False otherwise.
    """
    actiondialog = gtk.Dialog(title="Action",
                              flags=gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT,
                              buttons=(gtk.STOCK_SAVE, gtk.RESPONSE_ACCEPT,
                                       gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL))
    actiondialog.vbox.set_size_request(250, 135)
    label = gtk.Label()
    label.set_alignment(0, 0)
    # Heading depends on add vs. edit mode.
    if aid is None:
        label.set_markup("<big><b>Add Action</b></big>")
    else:
        label.set_markup("<big><b>Edit Action</b></big>")
    actiondialog.vbox.pack_start(label, False)
    label = gtk.Label("Name")
    label.set_alignment(0.0, 0.0)
    actiondialog.vbox.pack_start(label, False)
    actiondialog.actionentry = gtk.Entry(max=50)
    # Pressing Enter in the entry activates the default (Save) button.
    actiondialog.actionentry.set_activates_default(True)
    actiondialog.vbox.pack_start(actiondialog.actionentry, False)
    # Warning-date field not implemented yet:
    # label = gtk.Label("Warning Date")
    # label.set_alignment(0.0, 0.0)
    # actiondialog.vbox.pack_start(label, False)
    # actiondialog.warningentry = gtk.Entry(max=50)
    # actiondialog.vbox.pack_start(actiondialog.warningentry, False)
    actiondialog.set_position(gtk.WIN_POS_CENTER_ALWAYS)
    actiondialog.set_default_response(gtk.RESPONSE_ACCEPT)
    if aid is not None:
        # Pre-fill the entry with the existing action's name.
        actiondata = DBConnection.get_data("action", aid)
        actiondialog.actionentry.set_text(actiondata[1])
    actiondialog.show_all()
    response = actiondialog.run()
    if response == gtk.RESPONSE_ACCEPT:
        if aid is None:
            # New actions start not-done with a sentinel date.
            DBConnection.add_action(actiondialog.actionentry.get_text(),
                                    tid, 0, "1901-01-01")
        else:
            # Single quotes are doubled for SQL escaping.
            DBConnection.update_table("action",
                                      "name = '%(name)s', tid = %(tid)s" % {
                                          "name": actiondialog.actionentry.get_text().replace("'", "''"),
                                          "tid": tid}, aid)
        actiondialog.destroy()
        return True
    else:
        actiondialog.destroy()
        return False
def get_most_used_non_alcoholic():
    """Return the non-alcoholic drinks plus their most used glass type."""
    drinks = DBConnection.execute_query(
        queries.query_most_used_non_alcoholic())
    payload = {
        'drinks': drinks,
        'glass': json.loads(drinks)[0]['glass_type'],
    }
    return json.dumps(payload)
def get_meal_categories():
    """Return the allowed meal category names as JSON."""
    raw = DBConnection.execute_query(queries.get_meal_categories())
    names = [entry['category'] for entry in json.loads(raw)]
    return json.dumps({"allowed_meal_categories": names})
def common_ingredients():
    """Return the ingredients shared by the drinks named in the request."""
    if 'common_ingredients' not in request.args:
        return
    wanted = json.loads(request.args.get('common_ingredients'))
    found = DBConnection.execute_query(
        queries.query_common_ingredients(wanted))
    return json.dumps({'common_ingredients': found})
def createPlayerStatsFrom2011To12():
    """Create the PLAYER_STATS_2011_12 table if it does not exist."""
    conn = DBConnection.getConnection()
    conn.execute('''CREATE TABLE IF NOT EXISTS PLAYER_STATS_2011_12( Name,Season,Age,Tm,Pos)''')
    print('PLAYER_STATS_2011_12 table created...')
    conn.close()
    return
def getAuthorInfos(author_name):
    """Fetch a scholar's profile, print it, store it, and list the dated
    publications.

    Fix: the DB insert read ``author.citeby`` (AttributeError at runtime);
    scholarly exposes the citation count as ``citedby``, as the print just
    above already uses.
    """
    search_query = scholarly.search_author(author_name)
    # Take the first match and pull the full profile.
    author = next(search_query).fill()
    print("Name:", author.name)
    print("Affiliation:", author.affiliation)
    print("Email:", author.email)
    print("Interests", author.interests)
    print("H_index:", author.hindex)
    print("Number of citations:", author.citedby)
    DBConnection.insert_info('inforet', author.name, author.name,
                             author.email, author.affiliation,
                             author.hindex, author.citedby,
                             author.interests)
    print("List of publications:")
    # Only publications with both a year and a title are listed.
    for i, pub in enumerate(author.publications):
        if "year" in pub.bib and "title" in pub.bib:
            print(i + 1, pub.bib["title"], pub.bib["year"])
def full_text_search():
    """Full-text search over drinks for the words in the 'query' arg."""
    # to be completed
    if 'query' not in request.args:
        return
    text = json.loads(request.args.get('query'))
    drinks = DBConnection.execute_query(
        queries.query_full_text_search(text.split(' ')))
    return json.dumps({'drinks': drinks})
def get_categories_by_average_number_of_ingredients():
    """Rank the requested categories by their average ingredient count."""
    if 'categories' not in request.args:
        return
    wanted = json.loads(request.args.get('categories'))
    ranked = DBConnection.execute_query(
        queries.query_categories_by_average_number_of_ingredients(wanted))
    return json.dumps({'categories': ranked})
def createTeamStats():
    """Create the TEAM_STATS table if it does not exist."""
    conn = DBConnection.getConnection()
    conn.execute('''CREATE TABLE IF NOT EXISTS TEAM_STATS( Name,Season,Lg,Team,W,L,W_L_Ratio,Finish,SRS,Pace,Rel_Pace,ORtg,Rel_ORtg,DRtg,Rel_DRtg,Playoffs,Coaches,Top_WS )''')
    print('TEAM_STATS table created...')
    conn.close()
    return
def createPlayerProfile():
    """Create the player_profile table if it does not exist."""
    conn = DBConnection.getConnection()
    conn.execute('''create table if not exists player_profile( Player,FromYear,ToYear,Pos,Ht,Wt,DOB,College )''')
    print('player_profile table created...')
    conn.close()
    return
def createPlayerSalary():
    """Create the PLAYER_SALARY table if it does not exist."""
    conn = DBConnection.getConnection()
    conn.execute('''CREATE TABLE IF NOT EXISTS PLAYER_SALARY( Name,Season,Team,Lg,Salary )''')
    print('PLAYER_SALARY table created...')
    conn.close()
    return
def createPlayerStats():
    """Create the PLAYER_STATS table if it does not exist."""
    conn = DBConnection.getConnection()
    conn.execute('''CREATE TABLE IF NOT EXISTS PLAYER_STATS( Name,Season,Age,Tm,Lg,Pos,G,GS,MP,FG,FGA,FG%,3P,3PA,3P%,2P,2PA,2P%,eFG%,FT,FTA,FT%,ORB,DRB,TRB,AST,STL, BLK,TOV,PF,PTS )''')
    print('PLAYER_STATS table created...')
    conn.close()
    return
def loadPlayerProfileData():
    """Bulk-load player profile rows from CSV into player_profile."""
    conn = DBConnection.getConnection()
    source = readCSVFile(currentpath + '/data/player_since_2000_profile.csv')
    fields = ('Player', 'From', 'To', 'Pos', 'Ht', 'Wt', 'DOB', 'College')
    to_db = [tuple(row[f] for f in fields) for row in source]
    cur = conn.cursor()
    cur.executemany("INSERT INTO player_profile (Player,FromYear,ToYear,Pos,Ht,Wt,DOB,College) VALUES (?,?,?,?,?,?,?,?);", to_db)
    conn.commit()
    print('Load Player_profile done.')
    conn.close()
    return
def readTeamsProfile():
    """Debug helper: dump key TEAMS_PROFILE columns for every row."""
    # NOTE: python-2 print statements — this module is py2-only.
    conn = DBConnection.getConnection()
    cursor = conn.execute("SELECT * from TEAMS_PROFILE")
    for row in cursor:
        print "Team = ", row[0]
        print "TO_YEAR = ", row[4]
        print "CHAMP_WINS = ", row[13], "\n"
    print "Operation done successfully";
    conn.close()
    return
def loadTeamsProfileData():
    """Bulk-load franchise history rows from CSV into TEAMS_PROFILE."""
    conn = DBConnection.getConnection()
    source = readCSVFile(currentpath + '/data/teams_profile.csv')
    fields = ('Team', 'Franchise', 'Lg', 'From', 'To', 'Yrs', 'G', 'W', 'L',
              'W/L%', 'Plyfs', 'Div', 'Conf', 'Champ')
    to_db = [tuple(row[f] for f in fields) for row in source]
    cur = conn.cursor()
    cur.executemany("INSERT INTO TEAMS_PROFILE (TEAM, FRANCHISE,LEAGUE, FROM_YEAR,TO_YEAR,YEARS,GAMES,WIN,LOSS,WIN_LOSS_RATIO,PLAYOFFS,DIV_WINS,CONF_WINS,CHAMP_WINS) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?);", to_db)
    conn.commit()
    print('Load Teams Profile done.')
    conn.close()
    return
def loadPlayerSalaryData():
    """Bulk-load salary rows from CSV into PLAYER_SALARY."""
    conn = DBConnection.getConnection()
    source = readCSVFile(currentpath + '/data/players_salary.csv')
    fields = ('Name', 'Season', 'Team', 'Lg', 'Salary')
    to_db = [tuple(row[f] for f in fields) for row in source]
    cur = conn.cursor()
    cur.executemany("INSERT INTO PLAYER_SALARY (Name,Season,Team,Lg,Salary) VALUES (?,?,?,?,?);", to_db)
    conn.commit()
    print('Load PLAYER_SALARY done.')
    conn.close()
    return
def readPlayerSalary():
    """Debug helper: dump name/season/salary for every PLAYER_SALARY row."""
    # NOTE: python-2 print statements — this module is py2-only.
    conn = DBConnection.getConnection()
    cursor = conn.execute("SELECT * from PLAYER_SALARY")
    for row in cursor:
        print "Name = ", row[0]
        print "Season = ", row[1]
        print "Salary = ", row[4], "\n"
    print "Operation done successfully";
    conn.close()
    return
def readTeamStats():
    """Debug helper: dump key TEAM_STATS columns for every row."""
    # NOTE: python-2 print statements — this module is py2-only.
    conn = DBConnection.getConnection()
    cursor = conn.execute("SELECT * from TEAM_STATS")
    for row in cursor:
        print "Name = ", row[0]
        print "W/L% = ", row[6]
        print "Top WS = ", row[17], "\n"
    print "Operation done successfully";
    conn.close()
    return
def loadTeamStatsData():
    """Bulk-load per-season team stats from CSV into TEAM_STATS."""
    conn = DBConnection.getConnection()
    source = readCSVFile(currentpath + '/data/team_season_stats.csv')
    fields = ('Name', 'Season', 'Lg', 'Team', 'W', 'L', 'W/L%', 'Finish',
              'SRS', 'Pace', 'Rel_Pace', 'ORtg', 'Rel_ORtg', 'DRtg',
              'Rel_DRtg', 'Playoffs', 'Coaches', 'Top WS')
    to_db = [tuple(row[f] for f in fields) for row in source]
    cur = conn.cursor()
    cur.executemany("INSERT INTO TEAM_STATS (Name,Season,Lg,Team,W,L,W_L_Ratio,Finish,SRS,Pace,Rel_Pace,ORtg,Rel_ORtg, DRtg,Rel_DRtg,Playoffs,Coaches,Top_WS) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?);", to_db)
    conn.commit()
    print('Load TEAM_STATS done.')
    conn.close()
    return
def loadPlayerStatsData():
    """Bulk-load per-season player stats from CSV into PLAYER_STATS.

    Fix: the original was a copy-paste of the salary loader — it inserted
    into PLAYER_SALARY, built salary tuples, and listed 31 columns against
    only 5 placeholders, so no stats were ever loaded.  The column list
    now mirrors the schema from createPlayerStats().
    """
    conn = DBConnection.getConnection()
    path = currentpath + '/data/player_since_2000_stats.csv'
    infile = readCSVFile(path)
    # TODO(review): confirm the CSV header names match these table columns.
    columns = ('Name', 'Season', 'Age', 'Tm', 'Lg', 'Pos', 'G', 'GS', 'MP',
               'FG', 'FGA', 'FG%', '3P', '3PA', '3P%', '2P', '2PA', '2P%',
               'eFG%', 'FT', 'FTA', 'FT%', 'ORB', 'DRB', 'TRB', 'AST',
               'STL', 'BLK', 'TOV', 'PF', 'PTS')
    to_db = [tuple(row[c] for c in columns) for row in infile]
    cur = conn.cursor()
    placeholders = ','.join('?' * len(columns))
    cur.executemany(
        "INSERT INTO PLAYER_STATS (Name,Season,Age,Tm,Lg,Pos,G,GS,MP,FG,FGA,FG%,3P,3PA,3P%,2P,2PA,2P%,eFG%,FT,FTA,FT%,ORB,DRB,TRB,AST,STL,BLK,TOV,PF,PTS) VALUES (" + placeholders + ");",
        to_db)
    conn.commit()
    print('Load PLAYER_STATS done.')
    conn.close()
    return
def acisa_obj():
    """Return a dict mapping str(ACISA) -> ACISA for every known sensor."""
    cnxn = DBConnection.connector()
    cursor = cnxn.cursor()
    cursor.execute("SELECT [ACISA] FROM [snaps].[dbo].[SNAPsLocation]")
    mapping = {}
    # Drain the cursor one row at a time; fetchone returns None at the end.
    while 1:
        row = cursor.fetchone()
        if not row:
            break
        mapping[str(row.ACISA)] = row.ACISA
    return mapping
def createTeamsProfile():
    """Create the TEAMS_PROFILE table if it does not exist."""
    conn = DBConnection.getConnection()
    conn.execute('''CREATE TABLE IF NOT EXISTS TEAMS_PROFILE( TEAM CHAR(20), FRANCHISE CHAR(50), LEAGUE CHAR(10), FROM_YEAR INT, TO_YEAR INT, YEARS INT, GAMES INT, WIN INT, LOSS INT, WIN_LOSS_RATIO REAL, PLAYOFFS INT, DIV_WINS INT, CONF_WINS INT, CHAMP_WINS INT )''')
    print('TEAMS_PROFILE table created...')
    conn.close()
    return
def highchartdata(acisa, TimeString):
    """Build Highcharts series (hourly volume, one series per lane
    direction) for detector `acisa` over the 'start - end' range given in
    `TimeString` ('m/d/Y h:m AM - m/d/Y h:m PM').
    """
    cnxn = DBConnection.connector()
    cursor = cnxn.cursor()
    StartTime = str(datetime.datetime.strptime(TimeString.split(' - ')[0], '%m/%d/%Y %I:%M %p'))
    EndTime = str(datetime.datetime.strptime(TimeString.split(' - ')[1], '%m/%d/%Y %I:%M %p'))
    #SQLcmd = "SELECT * FROM snaps.SNAPs_history WHERE Time > '" + StartTime + "' and Time < '" + EndTime + "'"
    # NOTE(review): SQL assembled by concatenation; acisa/times are assumed
    # to come from trusted UI code here.
    cmd = "SELECT [ACISA],[laneDir],[VolSum],[avg_speed],[data_datetime] FROM [snaps].[dbo].[vw_aggredated_volume_speed] where [ACISA]=" + str(acisa) + " and [data_datetime]> '" + StartTime + "' and [data_datetime] < '" + EndTime + "'" + " order by [data_datetime], [laneDir]"
    cursor.execute(cmd)
    MainDict = {}
    while 1:
        row = cursor.fetchone()
        if not row:
            break
        # Fresh series template; pointInterval is one hour in milliseconds.
        tmpDict = {
            'type': 'area',
            'name': '',
            'pointInterval': 3600 * 1000,
            'pointStart': 0,
            'data': []
        }
        if str(row.ACISA) + row.laneDir in MainDict:
            # Rows arrive ordered by time, so later samples just append.
            MainDict[str(row.ACISA) + row.laneDir]['data'].append(row.VolSum)
        else:
            # First sample for this lane direction anchors the series start
            # (epoch milliseconds expected by Highcharts).
            starttime = row.data_datetime
            tmpDict.update({
                'type': 'area',
                'name': str(row.ACISA) + '-' + row.laneDir,
                'pointInterval': 3600 * 1000,
                'pointStart': calendar.timegm(starttime.timetuple()),
                'data': [row.VolSum]
            })
            MainDict[str(row.ACISA) + row.laneDir] = tmpDict
    return [MainDict[i] for i in MainDict]
def connectToDb(self):
    """Try to connect with the form's credentials; persist them on success.

    On success the host/db/user (the password is deliberately excluded)
    are stored in the app config under the chosen connection name and the
    dialog is accepted; on failure a critical message box is shown
    (German UI strings).
    """
    host = unicode(self.ui.lineEdit_host.text())
    db = unicode(self.ui.lineEdit_db.text())
    user = unicode(self.ui.lineEdit_user.text())
    pw = unicode(self.ui.lineEdit_pw.text())
    connName = unicode(self.ui.comboBox_connection.currentText())
    if DBConnection.connect(host, user, pw, db, connName):
        connParams = {'host': host, 'db': db, 'user': user}
        try:
            c = config.config['app_db_connections']
        except KeyError:
            # First saved connection: create the config section.
            c = config.config['app_db_connections'] = {}
        # Only named connections are persisted.
        if connName != '':
            c[connName] = connParams
            config.config.write()
        self.accept()
    else:
        QtGui.QMessageBox.critical(self, 'Verbindung fehlgeschlagen',
                                   'Verbindung fehlgeschlagen',
                                   QtGui.QMessageBox.Ok)
def SNAPS_Former(TimeString):
    """Build a GeoJSON-style FeatureCollection of traffic samples for the
    'start - end' range in `TimeString`, shaped like an earthquake feed so
    an existing quake-map front end can render it (volume -> magnitude,
    speed -> depth).
    """
    #StartTime = str(datetime.strptime(TimeRange.split(' - ')[0], '%m/%d/%Y %I:%M %p'))
    #EndTime = str(datetime.strptime(TimeRange.split(' - ')[1], '%m/%d/%Y %I:%M %p'))
    cnxn = DBConnection.connector()
    cursor = cnxn.cursor()
    StartTime = str(datetime.strptime(TimeString.split(' - ')[0], '%m/%d/%Y %I:%M %p'))
    EndTime = str(datetime.strptime(TimeString.split(' - ')[1], '%m/%d/%Y %I:%M %p'))
    # Previous (unlimited) query kept for reference:
    #cmd = "SELECT [snaps].[dbo].[vw_aggredated_volume_speed].[ACISA],SUM([VolSum]) as Vol,AVG([avg_speed]) as Speed,[data_datetime],AVG([Latitude]) as Lat,AVG([Longitude]) as Lon FROM [snaps].[dbo].[vw_aggredated_volume_speed], [snaps].[dbo].[SNAPsLocation] where [snaps].[dbo].[vw_aggredated_volume_speed].[ACISA] = [snaps].[dbo].[SNAPsLocation].[ACISA] and [data_datetime] > '" + StartTime + "' and [data_datetime] < '" + EndTime + "' group by [snaps].[dbo].[vw_aggredated_volume_speed].[ACISA],[data_datetime]"
    cmd = "SELECT TOP 1000 [snaps].[dbo].[vw_aggredated_volume_speed].[ACISA],SUM([VolSum]) as Vol,AVG([avg_speed]) as Speed,[data_datetime],AVG([Latitude]) as Lat,AVG([Longitude]) as Lon FROM [snaps].[dbo].[vw_aggredated_volume_speed], [snaps].[dbo].[SNAPsLocation] where [snaps].[dbo].[vw_aggredated_volume_speed].[ACISA] = [snaps].[dbo].[SNAPsLocation].[ACISA] and [data_datetime]> '" + StartTime + "' and [data_datetime] < '" + EndTime + "'" + " group by [snaps].[dbo].[vw_aggredated_volume_speed].[ACISA],[data_datetime]"
    cursor.execute(cmd)
    # Envelope copied from a quake feed; bbox/crs values are static.
    MainReturnDict = {
        "bbox": [
            171.65283,
            -43.9078,
            173.09492,
            -42.04222
        ],
        "crs": {
            "properties": {
                "code": "4326"
            },
            "type": "EPSG"
        },
        "type": "FeatureCollection",
        "features": []
    }
    while 1:
        # Fresh feature template each iteration; the placeholder values
        # below are overwritten before the feature is appended.
        initDict = {
            "geometry": {
                #"coordinates": [-77.07570999999999, 38.86651],
                "coordinates": [],
                "type": "Point"
            },
            "geometry_name": "origin_geom",
            "id": "",
            "properties": {
                "agency": "WEL(GNS_Primary)",
                #"bbox": [-77.07570999999999, 38.86651, -77.07570999999999, 38.86651],
                "bbox": [],
                #"depth": 14.23,
                "depth": 14.23,
                "latitude": 38.86651,
                "longitude": -77.07570999999999,
                #"magnitude": 3.56,
                "magnitude": 3.56,
                "magnitudetype": "Ml",
                #"origintime": "2010-09-30T19:59:10Z",
                "origintime": "2010-09-30T19:59:10Z",
                "phases": 15,
                "publicid": "3380927",
                "status": "reviewed",
                "type": "earthquake",
                "updatetime": "2012-04-26T09:01:00Z"
            },
            "type": "Feature"
        }
        row = cursor.fetchone()
        if not row:
            break
        # Skip low-volume samples and rows without a speed reading.
        if row.Vol < 50:
            continue
        if not row.Speed:
            continue
        initDict.update({'geometry': {'coordinates': [row.Lon, row.Lat], "type": "Point"}})
        initDict.update({'id': str(row.ACISA) + '.' + str(row.data_datetime)})
        initDict.update({'properties': {
            "agency": "WEL(GNS_Primary)",
            "bbox": [row.Lon, row.Lat, row.Lon, row.Lat],
            #"depth": 14.23,
            "depth": row.Speed / 5.0,
            "latitude": row.Lat,
            "longitude": row.Lon,
            #"magnitude": 3.56,
            "magnitude": math.log(row.Vol / 50.0) * 1.5,
            "magnitudetype": "Ml",
            #"origintime": "2010-09-30T19:59:10Z",
            "origintime": str(row.data_datetime),
            "phases": 15,
            "publicid": "3380927",
            "status": "reviewed",
            "type": "earthquake",
            "updatetime": "2012-04-26T09:01:00Z"
        }})
        MainReturnDict["features"].append(initDict)
    return MainReturnDict
found = query.value(0).toInt()[0] if found: return lib.DienstnehmerEreignis.DienstnehmerEreignis(found) else: return None if __name__ == '__main__': import sys import DBConnection import lib.Dienstnehmer # import lib.DienstnehmerEreignisTyp print sys.argv DBConnection.connect(sys.argv[1], sys.argv[2], sys.argv[3], sys.argv[4]) dbObj = DienstnehmerEreignis() dbObj.get(48) while dbObj.next(): print dbObj print dbObj.getAntipode() print (("*"*20)+'\n')*5 dbObj.get(25) print dbObj print dbObj.getAntipode() """ dn = lib.Dienstnehmer.Dienstnehmer(21) ereignisse = DienstnehmerEreignis().find('dir_dinid', dn['din_id'])
def CreateTables(db_name):
    """Create the build/test/compilation result schema (idempotent)."""
    curs = DBConnection(db_name).curs
    # One DDL statement per table; IF NOT EXISTS keeps this re-runnable.
    ddl = (
        "CREATE TABLE IF NOT EXISTS build(build_id SMALLINT NOT NULL AUTO_INCREMENT, build_name VARCHAR(100) NOT NULL, hostname VARCHAR(50), os VARCHAR(20), 64bit TINYINT(1), compiler VARCHAR(20), debug TINYINT(1), optimized TINYINT(1), static TINYINT(1), minimum TINYINT(1), PRIMARY KEY(build_id));",
        "CREATE TABLE IF NOT EXISTS build_instance(build_instance_id INT NOT NULL AUTO_INCREMENT, build_id SMALLINT NOT NULL, start_time DATETIME, end_time DATETIME, baseline VARCHAR(20), log_fname VARCHAR(200), test_insert_time DATETIME, compilation_insert_time DATETIME, PRIMARY KEY(build_instance_id));",
        "CREATE TABLE IF NOT EXISTS test(test_id SMALLINT NOT NULL AUTO_INCREMENT, test_name VARCHAR(100) NOT NULL, PRIMARY KEY(test_id));",
        "CREATE TABLE IF NOT EXISTS test_instance(test_id SMALLINT NOT NULL, build_instance_id INT NOT NULL, status VARCHAR(1), duration_time INT, PRIMARY KEY(test_id, build_instance_id));",
        "CREATE TABLE IF NOT EXISTS project(project_id SMALLINT NOT NULL AUTO_INCREMENT, project_name VARCHAR(100) NOT NULL, PRIMARY KEY(project_id));",
        "CREATE TABLE IF NOT EXISTS compilation_instance(project_id SMALLINT NOT NULL, build_instance_id INT NOT NULL, skipped TINYINT(1), num_errors INT, num_warnings INT, PRIMARY KEY(project_id, build_instance_id));",
    )
    for query in ddl:
        curs.execute(query)