def search():
    """Search for activities and handle join requests.

    Requires a logged-in session; anonymous users are redirected to the
    main page.  A POST without a 'join' field is a name search; a POST
    with 'join' records membership in the posted activity (public joins
    also trigger a confirmation email).  Otherwise the empty results
    page is rendered.
    """
    if session.get('user_id', None) is None:
        return redirect(url_for('main_page'))
    user_info = db.get_user(session.get('user_id'))
    categories = db.get_activity_type()
    skills = {0: 'Any', 1: 'Beginner', 2: 'Intermediate', 3: 'Expert', 4: 'Master'}
    access = {0: 'Public', 1: 'Private'}
    # Map activityid -> flat list of userids.  The original code appended
    # the wrapper list itself on later hits, producing nested values like
    # [u1, [u2]]; setdefault keeps the list flat.  (Also drops a leftover
    # debug print loop over categories and an unused counter.)
    user_activities = {}
    for u_acts in db.get_all_user_activities():
        user_activities.setdefault(u_acts['activityid'], []).append(u_acts['userid'])
    if request.method == 'POST':
        if request.form.get('join', None) is None:
            # Plain search: look activities up by name.
            activity_name = request.form['activity-name']
            results = db.get_activity(name=activity_name)
            return render_template('SearchResultsPage.html', user=user_info,
                                   u_activities=user_activities, results=results,
                                   a=access, skills=skills, categories=categories,
                                   maps_key=utilities.get_key('google_maps'))
        # Join request: record membership; public joins get an email.
        db.add_user_activity(user_info[0]['id'], request.form['activity-id'],
                             request.form['activity-private'])
        if request.form['activity-private'] == '0':
            utilities.send_email([user_info[0]['email']], 'Activity Joined',
                                 'You successfully joined an activity!')
    # Join-POST and GET render the same page with the same arguments.
    return render_template('SearchResultsPage.html', user=user_info,
                           u_activities=user_activities, categories=categories,
                           a=access, skills=skills,
                           maps_key=utilities.get_key('google_maps'))
def create_event():
    """Render the event-creation form and create an activity on POST.

    Requires a logged-in session.  On a successful POST the activity is
    stored, a confirmation email is sent, and the user is redirected
    home.  Bug fixed: the original built ``redirect(url_for('home'))``
    but discarded it (missing ``return``), so it always fell through to
    re-rendering the form.
    """
    if session.get('user_id', None) is None:
        return redirect(url_for('main_page'))
    user_info = db.get_user(session.get('user_id'))
    categories = db.get_activity_type()
    if request.method == 'POST':
        # Locals renamed so they no longer shadow the time/datetime modules.
        event_date = request.form['date']
        event_time = request.form['time']
        db.add_activity(name=request.form['activity-name'],
                        category=request.form['category'],
                        datetime=utilities.combine_datetime(event_date, event_time),
                        duration=request.form['duration'],
                        latitude=request.form['lat'],
                        longitude=request.form['lng'],
                        numplayers=request.form['num-of-players'],
                        skill=request.form['skill-level'],
                        private=request.form['private'],
                        leader=session.get('user_id'),
                        available=1)
        utilities.send_email([user_info[0]['email']], 'Activity Created',
                             'You successfully created an activity!')
        return redirect(url_for('home'))
    return render_template('create_event.html',
                           key=utilities.get_key('google_maps'),
                           user=user_info, categories=categories)
def bond_turfs(form):
    """Build bond-canvassing turf PDFs around a center address and email them.

    *form* is a mapping with keys 'center_address', 'email',
    'est_canvassers' and 'percent_affordable' (whole-number percent).
    Affordable and market-rate units are each split into main / backup /
    bonus pools, matched against each other, rendered to PDFs and emailed;
    the temporary working folder is removed at the end.

    NOTE(review): Python 2 code (print statements).  The temp folder is
    not cleaned up if any step raises before shutil.rmtree.
    """
    # Random suffix keeps concurrent runs from clobbering each other's folder.
    folder_name = 'temp_folder_' + str(randint(1000, 10000))
    os.makedirs(folder_name)
    os.makedirs(folder_name + '/temp_folder')
    center_address = form['center_address']
    email = form['email']
    est_canvassers = int(form['est_canvassers'])
    # Form supplies a whole-number percentage; convert to a 0-1 fraction.
    percent_affordable = float(form['percent_affordable']) / 100.0
    coords = get_coordinates(center_address, False)
    # Addresses to exclude from the cut.
    skip_addresses = pd.read_csv("bond_skip_addresses.csv")
    print form
    print coords
    # Affordable units: split big vs small, choose main/backup sets sized
    # by the canvasser estimate; small ones become bonus stops.
    afford_units = read_afford_units(coords, skip_addresses)
    big_afford_units, small_afford_units = split_afford_units(afford_units)
    main_afford_units, backup_afford_units = main_backup_afford_units(
        big_afford_units, est_canvassers, percent_affordable)
    bonus_afford_units = get_bonus_afford_units(small_afford_units,
                                                main_afford_units)
    # Market-rate units get the same main/backup/bonus treatment.
    market_rate_units = get_market_rate_units(coords, afford_units,
                                              skip_addresses)
    market_rate_units = clean_market_rate_units(market_rate_units, coords)
    big_market_rate_units, small_market_rate_units = split_market_rate_units(
        market_rate_units)
    main_market_rate_units, backup_market_rate_units = main_backup_market_rate_units(
        big_market_rate_units, percent_affordable, est_canvassers)
    bonus_market_rate_units = get_bonus_market_rate_units(
        small_market_rate_units, main_market_rate_units)
    # Merge the two pools and rank main units by score (highest first).
    main_units = merge_units(main_afford_units, main_market_rate_units)
    main_units = main_units.sort_values("score",
                                        ascending=False).reset_index(drop=True)
    backup_units = merge_units(backup_afford_units, backup_market_rate_units)
    bonus_units = merge_units(bonus_afford_units, bonus_market_rate_units)
    # Pair each main turf with its backup and bonus counterparts.
    backup_dict = match_frames(main_units, backup_units, colname="bigcount")
    main_unit_match, bonus_unit_match = bonus_match(main_units, bonus_units)
    bonus_dict = match_frames(main_unit_match, bonus_unit_match)
    make_pdf(main_units, backup_units, bonus_units, backup_dict,
             main_unit_match, bonus_unit_match, bonus_dict, folder_name)
    bond_assign_pdf(main_units, folder_name)
    #Email 3 pdfs to email address specified
    send_email(email, folder_name)
    print 'sent email'
    #Delete the temp folder
    shutil.rmtree(folder_name)
    return
def rosters():
    """Roster-management page for activities the current user leads.

    Builds the list of led activities, answers AJAX roster lookups
    (``loadActivityID``) as JSON, and on POST accepts or kicks a player
    before rendering the page.
    """
    if session.get('user_id', None) is None:
        return redirect(url_for('main_page'))
    user_info = db.get_user(session.get('user_id'))
    my_id = user_info[0]['id']
    # Collect presentation-ready details for every activity this user leads.
    act_list = []
    for membership in db.get_user_activity(user_id=my_id):
        details = db.get_activity(activity_id=membership['activityid'])[0]
        if my_id != details['leader']:
            continue
        details['latitude'] = float(details['latitude'])
        details['longitude'] = float(details['longitude'])
        details['time'] = int(time.mktime(details['datetime'].timetuple())) * 1000
        details['date'] = details['datetime'].date().strftime('%m/%d/%Y')
        act_list.append(details)
    # AJAX lookup: return the roster for one activity as JSON.
    if request.args.get('loadActivityID') is not None:
        users = []
        for rec in db.get_user_activity(activity_id=request.args['loadActivityID']):
            entry = db.get_user(user_id=rec['userid'], select='id, uname')[0]
            entry['approved'] = rec['isApplicant']
            users.append(entry)
        return jsonify(users=users)
    # Roster action: approve an applicant or remove a player.
    if request.method == 'POST':
        activityID = request.form['activityID']
        playerID = request.form['playerID']
        playerUsr = db.get_user(playerID)
        if request.form['action'] == 'add':
            db.edit_user_activity_is_applicant(playerID, activityID, 0)
            utilities.send_email(playerUsr[0]['email'],
                                 'Activity Request Accepted',
                                 'You were accepted to an activity!')
        else:
            db.leave_activity(user_id=playerID, activity_id=activityID)
            utilities.send_email(playerUsr[0]['email'],
                                 'Removed From Activity',
                                 'You were kicked from an activity!')
    return render_template('RostersPage.html', user=user_info,
                           activities=act_list,
                           maps_key=utilities.get_key('google_maps'))
def rosters():
    """Roster page: list the user's activities and process roster actions.

    GET with ``loadActivityID`` returns that activity's roster as JSON;
    POST with action 'add' approves an applicant, 'kick' removes a
    player.  Fix: removed two leftover debug ``print`` calls (the loaded
    activity id and the assembled user list); logic is unchanged.
    """
    if session.get('user_id', None) is None:
        return redirect(url_for('main_page'))
    user_info = db.get_user(session.get('user_id'))
    act_list = []
    for activity in db.get_user_activity(user_id=user_info[0]['id']):
        activity_details = db.get_activity(activity_id=activity['activityid'])[0]
        # Convert DB values into the types the template/JS expects.
        activity_details['latitude'] = float(activity_details['latitude'])
        activity_details['longitude'] = float(activity_details['longitude'])
        activity_details['time'] = int(time.mktime(activity_details['datetime'].timetuple())) * 1000
        activity_details['date'] = activity_details['datetime'].date().strftime('%m/%d/%Y')
        act_list.append(activity_details)
    if request.method == 'GET':
        if request.args.get('loadActivityID') is not None:
            user_activity = db.get_user_activity(activity_id=request.args['loadActivityID'])
            users = []
            for record in user_activity:
                user = db.get_user(user_id=record['userid'], select='id, uname')[0]
                user['approved'] = record['isApplicant']
                users.append(user)
            return jsonify(users=users)
    if request.method == 'POST':
        activityID = request.form['activityID']
        playerID = request.form['playerID']
        playerUsr = db.get_user(playerID)
        if request.form['action'] == 'add':
            db.edit_user_activity_is_applicant(playerID, activityID, 0)
            utilities.send_email(playerUsr[0]['email'],
                                 'Activity Request Accepted',
                                 'You were accepted to an activity!')
        elif request.form['action'] == 'kick':
            db.leave_activity(user_id=playerID, activity_id=activityID)
            utilities.send_email(playerUsr[0]['email'],
                                 'Removed From Activity',
                                 'You were kicked from an activity!')
    return render_template('RostersPage.html', user=user_info,
                           activities=act_list,
                           maps_key=utilities.get_key('google_maps'))
def main():
    """Scrape releases, find profitable ones, and email the results.

    A failure sending to one recipient is logged and does not stop the
    remaining sends.  Fixes: the bare ``except:`` (which also swallowed
    KeyboardInterrupt/SystemExit) is narrowed to ``except Exception``,
    and the message body is rendered once instead of once per recipient.
    """
    releases = load_releases(chrome_driver, stockx_url)
    final_releases_list = find_profit_snk(chrome_driver,
                                          select_releases(chrome_driver, releases),
                                          prem=prem_percent)
    if final_releases_list:
        # Render the message once; it is identical for every recipient.
        msg = utilities.dict_to_string(final_releases_list)
        for email in send_to_email:
            try:
                utilities.send_email(smtp_server=email_server,
                                     email_user=email_user,
                                     email_password=email_pass,
                                     send_from=email_user,
                                     send_to=email,
                                     msg=msg)
            except Exception:
                # Best-effort per recipient: log and continue.
                traceback.print_exc()
                print("Failed to send email to %s" % email)
    print(utilities.dict_to_string(final_releases_list))
def search():
    """Activity search page; also handles 'join' submissions.

    Anonymous users are redirected to the main page.  A POST without a
    'join' field performs a name search; a POST with 'join' adds the
    user to the activity and sends a confirmation email.
    """
    if session.get('user_id', None) is None:
        return redirect(url_for('main_page'))
    user_info = db.get_user(session.get('user_id'))
    categories = db.get_activity_type()
    maps_key = utilities.get_key('google_maps')
    # Plain GET: render the empty results page.
    if request.method != 'POST':
        return render_template('SearchResultsPage.html', user=user_info,
                               categories=categories, maps_key=maps_key)
    if request.form.get('join', None) is None:
        # Name search submitted from the search box.
        matches = db.get_activity(name=request.form['activity-name'])
        return render_template('SearchResultsPage.html', user=user_info,
                               results=matches, categories=categories,
                               maps_key=maps_key)
    # Join button pressed for a specific activity.
    db.add_user_activity(user_info[0]['id'], request.form['activity-id'])
    utilities.send_email(user_info[0]['email'], 'Activity Joined',
                         'You joined: ' + request.form['activity-name-item'])
    return render_template('SearchResultsPage.html', user=user_info,
                           categories=categories, maps_key=maps_key)
def create_event():
    """Render the event-creation form and create an activity on POST.

    Requires a logged-in session.  On a successful POST the activity is
    stored, a confirmation email is sent, and the user is redirected
    home.  Bug fixed: ``redirect(url_for('home'))`` was called but its
    result discarded (missing ``return``), so the form was re-rendered
    instead of redirecting.
    """
    if session.get('user_id', None) is None:
        return redirect(url_for('main_page'))
    user_info = db.get_user(session.get('user_id'))
    categories = db.get_activity_type()
    if request.method == 'POST':
        # Locals renamed so they no longer shadow the time/datetime modules.
        event_date = request.form['date']
        event_time = request.form['time']
        db.add_activity(name=request.form['activity-name'],
                        category=request.form['category'],
                        datetime=utilities.combine_datetime(event_date, event_time),
                        duration=request.form['duration'],
                        latitude=request.form['lat'],
                        longitude=request.form['lng'],
                        numplayers=request.form['num-of-players'],
                        skill=request.form['skill-level'],
                        private=request.form['private'],
                        leader=session.get('user_id'),
                        available=1)
        utilities.send_email([user_info[0]['email']], 'Activity Created',
                             'You successfully created an activity!')
        return redirect(url_for('home'))
    return render_template('create_event.html',
                           key=utilities.get_key('google_maps'),
                           user=user_info, categories=categories)
def get_a_good_server(zone):
    """Return the first active server in *zone* whose /mksysb directory exists.

    Pings each candidate, opens an SSH session and tests for /mksysb/.
    If no server qualifies, an alert email is sent and the process exits.
    Fix: the SSHClient was previously leaked on every iteration; it is
    now closed in a finally block.

    I'm banking that five servers won't be missing the /mksysb directory
    all at the same time....
    """
    server_list = AIXServer.objects.filter(active=True, decommissioned=False, zone=zone)
    success = 0
    for server in server_list:
        if utilities.ping(server):
            client = SSHClient()
            try:
                if utilities.ssh(server, client):
                    stdin, stdout, stderr = client.exec_command(' [ -d /mksysb/ ] && echo 1 || echo 0')
                    test = stdout.readlines()
                    if int(test[0]):
                        success = 1
                        break
            finally:
                # Always release the SSH connection, including on break.
                client.close()
    if not success:
        # NOTE(review): the subject says /mksysbWPAR but the check above is
        # for /mksysb/ -- confirm which directory is intended.
        subject = 'None of the servers have a /mksysbWPAR directory'
        print(subject)
        utilities.send_email(subject, server_list)
        sys.exit()
    else:
        return server
def send_notification_email(self):
    """Send an email with package execution details, depending on package result.

    Whether emails are sent depends on the global and package settings,
    e.g. send_on_success: emails go out when the script timed out,
    raised, or when Config.NOTIFY_SUCCESS is set.

    Bug fixed: the recipient list previously aliased
    ``Config.NOTIFICATION_EMAILS_TO`` and then ``extend()``ed it, so
    package-specific addresses were permanently appended to the global
    config list on every call.  A copy is taken instead.
    """
    if self._script_thread.script_timed_out or self._script_thread.script_exceptioned or Config.NOTIFY_SUCCESS:
        logging.debug("Sending notification emails.")
        # Copy the global list so extend() below cannot mutate Config.
        email_to = list(Config.NOTIFICATION_EMAILS_TO)
        email_from = Config.NOTIFICATION_EMAILS_FROM
        # Add any package-specific email addresses
        if 'notification-emails' in self.parameters:
            email_to.extend(self.parameters['notification-emails'])
        subject, body = self.get_notification_text()
        send_email(subject, body, email_from, email_to, Config.SMTP_ADDRESS,
                   Config.SMTP_USERNAME, Config.SMTP_PASSWORD)
        logging.info("Notification emails sent.")
    else:
        logging.debug("No need to send notification emails.")
def apt_turfs(form):
    """Cut apartment-complex canvassing turfs and email the result PDF.

    *form* supplies 'center_address', 'email' and 'est_canvas_teams'.
    Runs 20 randomized turf assignments, keeps the one with the lowest
    average route length per team, labels each team's buildings as
    main / backup / bonus, writes one PDF and emails it.

    NOTE(review): Python 2 code (print statements).  min_data / min_team /
    min_routes are bound only inside the ``total_len < min_len`` branch;
    the huge sentinel guarantees the first iteration binds them.
    """
    # Random suffix keeps concurrent runs from clobbering each other's folder.
    folder_name = 'temp_folder_' + str(randint(1000, 10000))
    os.makedirs(folder_name)
    os.makedirs(folder_name + '/temp_folder')
    center_address = form['center_address']
    email = form['email']
    est_canvas_teams = int(form['est_canvas_teams'])
    center_coords = get_coordinates(center_address, False)
    # Buildings with >= team_max units are treated as "big" below.
    team_max = 45
    skip_addresses = pd.read_csv("bond_skip_addresses.csv")
    print 'Set coords'
    #Filter by region
    #Filter by skip addresses
    data = upload_apartment_list()
    print 'Got apartment list'
    data = data.loc[:, ["address", "units", "cost", "year", "LAT", "LON"]]
    #Fill in the missing year and cost with the averages
    avgyear = np.mean(data.loc[pd.notnull(data["year"]), "year"].map(float))
    avgcost = np.mean(data.loc[pd.notnull(data["cost"]), "cost"].map(float))
    #print avgyear
    data.loc[pd.isnull(data["cost"]), "cost"] = avgcost
    #print data["year"]
    data.loc[pd.isnull(data["year"]), "year"] = avgyear
    print data["units"]
    print 'updated data'
    # Sentinel "infinity" so the first candidate assignment always wins.
    min_len = 99999999999999999999
    for j in range(20):
        # Each iteration produces a random turf assignment plus routes.
        [temp_data, team_table, temp_routes] = iterate_apts(
            data, center_coords, random_function, avgyear, avgcost,
            est_canvas_teams, team_max)
        # Objective: average route length across teams.
        total_len = sum([i[-1] for i in temp_routes]) / len(team_table)
        if total_len < min_len:
            min_len = total_len
            min_data = temp_data.copy()
            min_team = team_table.copy()
            min_routes = temp_routes[:]
    pdf = FPDF()
    #Scroll through each team, label apartment type, write PDFs
    for ind, row in min_team.iterrows():
        teams = row.teams
        temp_table = min_data[min_data["team"] == ind]
        # Visit order from the routing solution (drop the start point).
        temp_table["order"] = get_order(min_routes[ind][0][1:])
        # Default every building to "bonus"; big ones are upgraded below.
        temp_table["apt_type"] = "bonus"
        temp_table["apt_sort"] = 2
        big_apts = temp_table[temp_table["units"] >= team_max]
        start = True
        for ind1, row1 in big_apts.iterrows():
            if start:
                # First big building is the team's main assignment.
                temp_table.at[ind1, "apt_type"] = "main"
                temp_table.at[ind1, "apt_sort"] = 0
                start = False
            else:
                temp_table.at[ind1, "apt_type"] = "backup"
                temp_table.at[ind1, "apt_sort"] = 1
        temp_table = temp_table.sort_values(by=["apt_sort", "order"])
        add_pages(temp_table, int(teams), pdf, ind)
    pdf.output(folder_name + "/temp_folder/pdf.pdf")
    #Email 3 pdfs to email address specified
    send_email(email, folder_name)
    print 'sent email'
    #Delete the temp folder
    shutil.rmtree(folder_name)
    return
def output_turfs(form):
    """Cut door-knocking turfs for a region and email the result PDFs.

    Reads address data for the region from MySQL, clusters it into turfs
    around the requested center address, iteratively splits oversized
    turfs and merges undersized ones, then renders three PDFs (turf
    maps, address sheets, team-assignment sheet) and emails them.  Any
    exception is reported to the requester via send_error_report.

    NOTE(review): Python 2 code -- print statements and the ``<>``
    operator.  The SQL below interpolates form-supplied values directly
    (injectable).
    """
    #Get the parameters from the Django form
    num_clusters = form['turf_count']
    turf_size = form['turf_size']
    region = form['region_name']
    center_address = form['center_address'] + " " + region
    email = form['email']
    if form['extra_filters']:
        extra_filters = form['extra_filters']
    else:
        extra_filters = None
    include_nonvoters = form['include_nonvoters']
    try:
        #send_error_email(email)
        # Random suffix keeps concurrent requests in separate folders.
        folder_name = 'temp_folder_' + str(randint(1000, 10000))
        os.makedirs(folder_name)
        os.makedirs(folder_name + '/temp_folder')
        #Updated data is the master list of addresses and # of registered voters
        #Will replace with an actual database in a future update
        #data = pd.read_excel("Updated_data.xlsx")
        #data = pd.read_excel("District_7_data.xlsx")
        data = read_mysql_data(
            "SELECT distinct region, address, full_street, orig_address, voters, doors, NUMBER, STREET, LAT, LON FROM canvas_cutting.cutter_canvas_data where region = '{region}'"
            .format(region=region))
        print len(data)
        if extra_filters:
            # NOTE(review): region and extra_filters come straight from the
            # form and are interpolated into SQL -- confirm trust boundary.
            query = """ SELECT distinct address as vd_address FROM `voter_data_{region}` vd WHERE {extra_filters} """.format(region=region, extra_filters=extra_filters)
            good_addresses = read_mysql_data(query)
            print len(good_addresses)
            if not include_nonvoters:
                # Keep only addresses matching the extra voter-data filters.
                columns = data.columns
                data = data.merge(good_addresses, how="inner",
                                  left_on="orig_address",
                                  right_on="vd_address")
                data = data.loc[:, columns]
                print len(data)
            else:
                # Filter only voter addresses; keep all no-voter doors.
                columns = data.columns
                v_data = data.loc[data["voters"] <> 0, :]
                nv_data = data.loc[data["voters"] == 0, :]
                v_data = v_data.merge(good_addresses, how="inner",
                                      left_on="orig_address",
                                      right_on="vd_address")
                v_data = v_data.loc[:, columns]
                data = v_data.append(nv_data)
                v_data, nv_data = None, None
        print data.columns
        print data.head()
        #data = read_mysql_data("SELECT distinct region, address, full_street, orig_address, voters, doors, NUMBER, STREET, LAT, LON FROM canvas_cutting.cutter_canvas_data where region = 'Austin, TX'")
        #Based on turf size and central point take the X closest addresses
        make_filtered_file(data, center_address, num_clusters, turf_size,
                           folder_name + "/Test_filter.xlsx")
        #Take big data file out of memory
        data = None
        print 'made filtered file'
        #Open filtered data
        slice_data = pd.read_excel(folder_name + "/Test_filter.xlsx")
        #Open list of file with lists of 2 streets and if they intersect
        #Format of file is:
        #12th Street, River Street, TRUE
        #12th Street, 13th Street, FALSE
        #intersect_data = pd.read_csv("Intersections_1.csv")
        #Look at the list of streets and find the intersections
        #This is used to ensure that we make continuous routes
        u = update_thresholds(slice_data, region)
        if not u:
            # Not enough intersection data for this region yet; bail out.
            print 'Still need to collect more addresses'
            send_error_email(email)
            return
        #Load this list of intersections
        threshold_dict = load_threshold_dict(region, True)
        print 'loaded threshold dict'
        #print slice_data
        #Give addresses with nulls for voters and doors 0 for voters and doors
        slice_data = update_slice_data_nulls(slice_data)
        #Add avg lat and lon for street to each address. This will be used for clustering
        slice_data_updated = update_slice_data_avgs(slice_data)
        #Create clusters - assign a turf # to each address
        slice_data_updated = update_slice_data_clusters(
            slice_data_updated, num_clusters)
        #Look for clusters that are not a continuous route.
        #Remove streets that don't connect from the cluster
        slice_data_updated = update_slice_data_check_clusters(
            slice_data_updated, num_clusters, threshold_dict)
        #For streets that got removed, try to find a new cluster
        slice_data_updated = check_bad_streets(slice_data_updated,
                                               threshold_dict)
        print 'clusters'
        #Scroll through turfs and split turfs with too many doors into 2
        #As long as there are 2 turfs
        check_for_splits = True
        while check_for_splits:
            check_for_splits = False
            #Reorder clusters by proximity to center point
            slice_data_updated = update_cluster_numbers(slice_data_updated)
            #Cluster totals is a frame where each row is a cluster and contains cluster-level stats
            cluster_totals = get_cluster_totals(slice_data_updated)
            #Take the largest clusters and split them into 2
            for i in cluster_totals.itertuples():
                #Right now we're setting the max size of a turf as 2.4 doors * the turf size
                #And then subtract (.3 * the turf size) doors for each km of walking distance
                #We've found these numbers are OK for a 2.5 hour canvas, erring to the side of being too big
                #Def want to make these configurable
                max_size = 2 * turf_size - (.25 * turf_size * i.walking_distance)
                #Only split a turf if:
                #1. It has more doors than the max
                #2. It has more than 1 address (this is too keep it 1 team per address and avoid confusion)
                #(We never split into more than 2 turfs, it was just too messy when I tried it with more).
                splits = min(
                    int(i.doors) / int(max_size / 2), int(i.addresses), 2)
                splits = max(splits, 1)
                if splits > 1:
                    #Split turf into 2
                    slice_data_updated = split_cluster(slice_data_updated,
                                                       i.Cluster, splits)
                    check_for_splits = True
        #Remove no voter clusters
        for i in cluster_totals.itertuples():
            if i.voters == 0:
                #print i
                # Cluster -1 marks addresses dropped from all turfs.
                slice_data_updated.loc[slice_data_updated["Cluster"] ==
                                       i.Cluster, "Cluster"] = -1
        print 'splits'
        #Reorder clusters by distance
        slice_data_updated = update_cluster_numbers(slice_data_updated)
        #Get cluster-level statistic
        cluster_totals = get_cluster_totals(slice_data_updated)
        #Scroll through clusters and try to merge small turfs with another turf
        missing_clusters = []
        for i in cluster_totals.itertuples():
            if i.Cluster in missing_clusters:
                continue
            #Min desired turf size is 1.8 * turf_size - (.3 * turf size * walking distance in km)
            min_size = 1.4 * turf_size - (.25 * turf_size * i.walking_distance)
            if i.doors < (min_size) and i.addresses < (min_size):
                #Function checks for a potential merger
                upd_cluster = new_whole_cluster(cluster_totals,
                                                slice_data_updated, i.Cluster,
                                                threshold_dict, turf_size,
                                                missing_clusters)
                if upd_cluster:
                    #If there's a merger update the cluster column on the list of addresses
                    slice_data_updated.loc[(
                        slice_data_updated.Cluster == i.Cluster),
                        "Cluster"] = upd_cluster
                    #Updated the list of merged clusters so we don't try to merge with a cluster that already merged
                    missing_clusters.append(i.Cluster)
                    missing_clusters.append(upd_cluster)
        print 'merging'
        # NOTE(review): this second merge pass is a verbatim repeat of the
        # one above -- presumably to catch merges enabled by the first
        # pass's renumbering; confirm it is intentional.
        #Reorder clusters by distance
        slice_data_updated = update_cluster_numbers(slice_data_updated)
        #Get cluster-level statistic
        cluster_totals = get_cluster_totals(slice_data_updated)
        #Scroll through clusters and try to merge small turfs with another turf
        missing_clusters = []
        for i in cluster_totals.itertuples():
            if i.Cluster in missing_clusters:
                continue
            #Min desired turf size is 1.8 * turf_size - (.3 * turf size * walking distance in km)
            min_size = 1.4 * turf_size - (.25 * turf_size *
                                          i.walking_distance)
            if i.doors < (min_size) and i.addresses < (min_size):
                #Function checks for a potential merger
                upd_cluster = new_whole_cluster(cluster_totals,
                                                slice_data_updated, i.Cluster,
                                                threshold_dict, turf_size,
                                                missing_clusters)
                if upd_cluster:
                    #If there's a merger update the cluster column on the list of addresses
                    slice_data_updated.loc[(
                        slice_data_updated.Cluster == i.Cluster),
                        "Cluster"] = upd_cluster
                    #Updated the list of merged clusters so we don't try to merge with a cluster that already merged
                    missing_clusters.append(i.Cluster)
                    missing_clusters.append(upd_cluster)
        #re-order clusters by distance
        slice_data_updated = update_cluster_numbers(slice_data_updated)
        #Get cluster level stats
        cluster_totals = get_cluster_totals(slice_data_updated)
        print 'were close'
        #Remove clusters that are too small
        for i in cluster_totals.itertuples():
            min_size = .45 * turf_size
            if i.doors < (min_size):
                slice_data_updated.loc[slice_data_updated["Cluster"] ==
                                       i.Cluster, "Cluster"] = -1
        #re-order clusters by distance
        slice_data_updated = update_cluster_numbers(slice_data_updated)
        #Get cluster level stats
        cluster_totals = get_cluster_totals(slice_data_updated)
        #Write list of addresses to file
        slice_data_updated.to_excel(folder_name +
                                    "/temp_folder/Cluster_data.xlsx")
        #Write list of turfs to file
        cluster_totals.to_excel(folder_name + "/Cluster_totals.xlsx")
        #Take files out of memory
        slice_data_updated = None
        cluster_totals = None
        #Read cluster data
        data = pd.read_excel(folder_name + "/temp_folder/Cluster_data.xlsx")
        print 'making pdf'
        #Create a PDF file
        pdf = FPDF()
        #Figure out how many clusters we have
        max_cluster = max(data["Cluster"])
        #For each cluster make a page on the PDF file
        #Each page will have a map with dots for each address
        #And a list of streets to hit
        #These will be printed and given to canvassers
        for cluster in range(max_cluster + 1):
            #For each cluster get the list of addresses in that cluster
            zoom_plot_data = data.loc[data["Cluster"] == cluster, :]
            zoom_plot_data = zoom_plot_data.reset_index()
            #Get the number of registered doors and voters
            doors = sum(zoom_plot_data['doors'])
            voters = sum(zoom_plot_data['voters'])
            #Make an html map from the list of addresses
            make_html_file(zoom_plot_data, folder_name)
            #List all the streets the canvasser will hit
            street_list = get_street_list(zoom_plot_data)
            #Put text on the PDF with info about the turf
            text_page(pdf, cluster, street_list, doors, voters)
            #Convert the html file into an image file
            make_img_file(cluster, folder_name)
            #Put the image file onto the PDF
            add_img(pdf, cluster, folder_name, w=150)
        #Save the PDF
        pdf.output(folder_name + '/temp_folder/Turfs.pdf', 'F')
        print 'pdf saved'
        #Make a new PDF file
        #This file will be a list of addresses for the canvassers to visit
        pdf = FPDF(format='letter', unit='in', orientation='P')
        pdf.set_fill_color(215)
        pdf.set_auto_page_break(auto=True, margin=0.1)
        #Scroll through list of clusters and write the list to PDF
        for i in range(max_cluster + 1):
            print i
            try:
                write_cluster(pdf, data, i)
            except:
                # NOTE(review): bare except -- silently skips a failed
                # cluster after dumping it; consider narrowing.
                print data[data["Cluster"] == i]
        print 'pdf 2 saved'
        #Write sheets to the PDF
        pdf.output(folder_name + '/temp_folder/Sheets.pdf', 'F')
        print 'pdf 3 saved'
        #Read cluster totals file
        data = pd.read_excel(folder_name + "/Cluster_totals.xlsx")
        #Make another PDF file
        #This will be the master sheet used to assign canvas teams
        #This is set up for teams of 2 and 4 depeding on turf size
        #Need to make this configurable
        pdf = FPDF(format='letter', unit='in', orientation='P')
        pdf.set_fill_color(25)
        pdf.add_page()
        pdf.set_font('Times', 'B', 14.0)
        pdf.cell(7, 0.0, "Team Assignment Sheet", align='C')
        pdf.ln(0.5)
        #Scroll through clusters and write data for each cluster
        for i in data.itertuples():
            write_assign_sheet(pdf, i, turf_size)
        print 'assigned sheets'
        pdf.output(folder_name + '/temp_folder/Assign_sheet.pdf', 'F')
        print 'saved file'
        #Email 3 pdfs to email address specified
        send_email(email, folder_name)
        print 'sent email'
        #Delete the temp folder
        shutil.rmtree(folder_name)
        print 'deleted file'
    except Exception as e:
        # Report any failure back to the requester instead of crashing.
        send_error_report(email, e)
def search():
    """Search for activities (via the activity-type join) and handle joins.

    Same flow as the other search view but uses ``db.get_act_type_join``
    for the lookup.  Anonymous users are redirected to the main page.
    Bug fixed: the ``user_activities`` map previously appended the
    wrapper list itself on later hits, producing nested values like
    ``[u1, [u2]]``; it now keeps a flat list of userids per activity.
    """
    if session.get('user_id', None) is None:
        return redirect(url_for('main_page'))
    user_info = db.get_user(session.get('user_id'))
    categories = db.get_activity_type()
    skills = {
        0: 'Any',
        1: 'Beginner',
        2: 'Intermediate',
        3: 'Expert',
        4: 'Master'
    }
    access = {0: 'Public', 1: 'Private'}
    # activityid -> flat list of userids signed up for that activity.
    user_activities = {}
    for u_acts in db.get_all_user_activities():
        user_activities.setdefault(u_acts['activityid'], []).append(u_acts['userid'])
    if request.method == 'POST':
        if request.form.get('join', None) is None:
            # Plain search: look activities up by name.
            activity_name = request.form['activity-name']
            results = db.get_act_type_join(name=activity_name)
            return render_template('SearchResultsPage.html', user=user_info,
                                   u_activities=user_activities, results=results,
                                   a=access, skills=skills, categories=categories,
                                   maps_key=utilities.get_key('google_maps'))
        # Join request: record membership; public joins get an email.
        db.add_user_activity(user_info[0]['id'], request.form['activity-id'],
                             request.form['activity-private'])
        if request.form['activity-private'] == '0':
            utilities.send_email([user_info[0]['email']], 'Activity Joined',
                                 'You successfully joined an activity!')
    # Join-POST and GET render the same page with the same arguments.
    return render_template('SearchResultsPage.html', user=user_info,
                           u_activities=user_activities, categories=categories,
                           a=access, skills=skills,
                           maps_key=utilities.get_key('google_maps'))
def update_server():
    """Audit mksysb backups across all active AIX servers.

    Pulls directory listings of /mksysb (prod + nonprod), /mksysb/VIOS
    and /mksysb/WPARS from one "good" server per zone, buckets every
    backup file into today / yesterday / old, emails the stale-file list
    on Mondays, then prints pass/fail per server depending on whether
    any backup filename matches its hostname.

    NOTE(review): Python 2 code.  ``stdout=Pipe`` looks like it should
    be ``subprocess.PIPE`` unless ``Pipe`` is defined elsewhere --
    confirm.  The year fallback hard-codes '2015'/'2014'.
    """
    count = 0
    zone = 1
    nonprod_base_server = get_a_good_server(zone)
    zone = 2
    prod_base_server = get_a_good_server(zone)
    main_server_list = AIXServer.objects.filter(active=True, decommissioned=False)
    # PRODUCTION
    # get the dir listing for /mksysb
    main_command = 'ssh ' + prod_base_server.name + ' ls -lp /mksysb | grep mksysb | grep -v /'
    p = Popen(main_command, shell=True, stdout=Pipe)
    p.wait()
    prod_main_directory_list = p.stdout.readlines()
    # get the dir listing for /mksysb
    main_command = 'ssh ' + nonprod_base_server.name + ' ls -lp /mksysb | grep mksysb | grep -v /'
    p = Popen(main_command, shell=True, stdout=Pipe)
    p.wait()
    main_directory_list = p.stdout.readlines()
    # get the dir listing for /mksysb/VIOS
    vios_command = 'ssh ' + nonprod_base_server.name + ' ls -lp /mksysb/VIOS | grep -v log | grep -v total | grep -v /'
    p = Popen(vios_command, shell=True, stdout=Pipe)
    p.wait()
    vios_directory_list = p.stdout.readlines()
    # print vios_directory_list
    # get the dir listing for /mksysb/WPARS
    wpars_command = 'ssh ' + nonprod_base_server.name + ' ls -lp /mksysb/WPARS | grep bak | grep -v /'
    p = Popen(wpars_command, shell=True, stdout=Pipe)
    p.wait()
    wpars_directory_list = p.stdout.readlines()
    main_directory_list += prod_main_directory_list
    # these are lists of mksysb entries from the directories of good mksysbs to add
    yesterdays_list = []
    todays_list = []
    old_list = []
    old_dict = {}
    # we are iterating over every mksysb rather than every server because
    # we care more about what entries there are.
    # NOTE(review): the parameter names shadow the builtin `list`, and the
    # `count += 1` rebinds a local int, so the caller's `count` is never
    # updated; the lists are mutated via closure instead.
    def get_dir_lists(list, count):
        for entry in list:
            # NOTO: key will have to be the hostname. If there are duplicates,
            # we will need to get those first, set the duplicates flag, check
            # if they exist in the database and create if not, and then ditch
            # them so they don't screw up the dict
            # also of note, I'm really only looking for one mksysb per day
            # if 2 were created on the same day, it doesn't really matter, it exists
            # we DO care if it was created midday and not picked up at the 4am run
            # Parse one `ls -l` line; field 8 is the filename.
            test = entry.rstrip().split()
            filename = test[8]
            # These are the good ones
            # today = str(datetime.date.today())
            # yesterday = str(datetime.date.today() - timedelta(1))
            # FIXME pretending it's Monday
            today = str(datetime.date.today())
            yesterday = str(datetime.date.today() - timedelta(1))
            # month from ls is in text and we need to convert it for the timestamp
            month = str(strptime(test[5], '%b').tm_mon)
            # need one without the zero also to compare below
            temp_month = month
            if len(month) == 1:
                month = '0' + month
            if len(test[6]) == 1:
                day = '0' + test[6]
            else:
                day = test[6]
            date = month + '-' + day
            # *nix listings are in time, unless it's last year then it lists the
            # year instead so we need to find the : to determine which it is
            timestamp = test[7]
            if re.match('..:..', timestamp, flags=0):
                # This means the timestamp is within the last 6 months
                our_month = datetime.date.today().month
                # print 'our month .' + str(our_month) + '.'
                # print 'temp month .' + str(temp_month) + '.'
                # NOTE(review): hard-coded years; breaks outside 2014/2015.
                if int(temp_month) <= int(our_month):
                    year = '2015'
                else:
                    year = '2014'
            else:
                year = timestamp
            datestamp = year + '-' + date
            # FIXME take out looking for mksysb from 2 days ago, just for testing!!!
            # print 'BINGO'
            # print filename
            # print datestamp
            if datestamp == today:
                todays_list.append(filename.split('.')[0])
                count += 1
            elif datestamp == yesterday:
                yesterdays_list.append(filename.split('.')[0])
                count += 1
            else:
                old_list.append(filename.split('.')[0])
                count += 1
                old_dict[filename.split('.')[0]] = datestamp
        # print 'count = ' + str(count)
    # lets get the all the lists now
    get_dir_lists(main_directory_list, count)
    get_dir_lists(vios_directory_list, count)
    get_dir_lists(wpars_directory_list, count)
    all_directory_list = main_directory_list + vios_directory_list + wpars_directory_list
    # we really only need this list on Monday morning for Sunday fails
    # FIXME CHange email to Monday
    if datetime.date.today().strftime("%A") == 'Monday':
        # NOTE(review): `sorted` shadows the builtin; `.sort()` on the
        # items() result is Python-2-only (items() is a list there).
        sorted = old_dict.items()
        sorted.sort()
        message = ''
        for x, y in sorted:
            message = message + x + ' - ' + y + ' \n'
        utilities.send_email('Old Mksysb files', message)
    # print 'today----------------------'
    # print todays_list
    # print 'yesterday-------------------------'
    # print yesterdays_list
    # print 'OLD-------------------------'
    # print old_list
    # print 'COUNT======================'
    # print count
    # print
    # print timestamp
    # FIXME - for now I'm leaving out the timestamp but may want it later in the model
    # main_server_list = []
    # Report pass/fail per server: pass if any backup filename stem
    # matches the server's hostname.
    for server in main_server_list:
        success = 0
        for entry in all_directory_list:
            test = entry.rstrip().split()
            filename = test[8].split('.')[0]
            # print server.name
            # print filename
            # NOTE(review): `continue` after a match keeps scanning; a
            # `break` would short-circuit -- confirm intent.
            if server.name == filename:
                print server.name + ' is good'
                success = 1
                continue
        if success == 0:
            print server.name + 'FAILED!!!!!!!!!'