def delete_mon_device(device_id):
    """Show a delete-confirmation page for a monitored device and, on POST,
    either cancel or delete it and return to the dashboard.

    Fixes vs. original: the DB connection is now closed on every path
    (the original never called pg.close()), and the flash message is built
    with str.format so a non-string device_id no longer raises TypeError.
    """
    if not session.get('logged_in'):
        return render_template('login.html')
    pg = DB()
    pg.connect()
    output = pg.get_device_info()
    datums = pg.get_device_datums()
    monitored_device = pg.get_monitored_device(device_id)
    if request.method == "POST":
        if request.form['submit_button'] == 'Cancel':
            pg.close()
            return redirect(url_for('dashboard'))
        elif request.form['submit_button'] == 'Delete':
            pg.delete_mon_device(device_id)
            pg.close()
            flash("Monitored Device: {0} Deleted".format(device_id))
            return redirect(url_for('dashboard'))
        # NOTE(review): a POST with an unknown submit_button falls through and
        # returns None, as in the original — confirm no such button exists.
        pg.close()
    else:
        pg.close()
        return render_template('delete_mon_device.html', output=output,
                               datums=datums, split_line=split_line,
                               to_mbps=to_mbps, mem_to_g=mem_to_g,
                               monitored_device=monitored_device)
def get_csv_charts():
    """Export memory-utilization and load-average samples as a CSV download."""
    db = DB()
    db.connect()
    mem_rows = db.get_mem_utilized()
    load_rows = db.get_load_avg()
    db.close()

    # Assemble the two report sections into one buffer, then join once.
    parts = ['Memory Utilization\n', 'Value,Time\n']
    parts.extend('{0},{1}\n'.format(row[0], row[1]) for row in mem_rows)
    parts.append('\n\nLoad Average\n')
    parts.append('Value,Time\n')
    parts.extend('{0},{1}\n'.format(row[0], row[1]) for row in load_rows)

    return Response(''.join(parts), mimetype="text/csv",
                    headers={"Content-disposition":
                             "attachment; filename=charts_report.csv"})
def get_csv_device():
    """Export device and system information as a CSV download."""
    db = DB()
    db.connect()
    info = db.get_device_info()
    sysinfo = db.get_device_datums()
    db.close()

    device_section = (
        'Device Information\n'
        'Server ID,Status,IP Address,Address,City,State,Zip,Country,Download Rate,Upload Rate\n'
        + '{0},{1},{2},{3},{4},{5},{6},{7},{8},{9}\n'.format(
            info['device_id'], info['status'], info['ip'], info['address'],
            info['city'], info['state'], info['zip'], info['country'],
            info['download_rate'], info['upload_rate']))
    system_section = (
        '\n\nSystem Information\n'
        'OS,Kernel,CPU Vendor,CPU Model,RAM (Gigs)\n'
        + '{0},{1},{2},{3},{4}\n'.format(
            sysinfo['os'], sysinfo['kernel'], sysinfo['cpu_vendor'],
            sysinfo['cpu_model'], mem_to_g(sysinfo['mem'])))

    return Response(device_section + system_section, mimetype="text/csv",
                    headers={"Content-disposition":
                             "attachment; filename=device_report.csv"})
def dashboard():
    """Render the main dashboard with device, event and interface data."""
    if not session.get('logged_in'):
        return render_template('login.html')

    # Gather everything the template needs from Postgres in one connection.
    db = DB()
    db.connect()
    context = {
        'output': db.get_device_info(),
        'datums': db.get_device_datums(),
        'monitored_devices': db.get_monitored_devices(),
        'active_mon_events': db.get_active_mon_events(),
        'active_reachability_events': db.get_active_reachability_events(),
        'interface_status': db.get_interface_status(),
    }
    crasd = process_check('crasd')
    db.close()

    return render_template('home.html', crasd=crasd, mem_to_g=mem_to_g,
                           split_line=split_line, to_mbps=to_mbps, **context)
def decorated_request(*args, **kwargs):
    """Wrapper around *func* (closure from the enclosing decorator).

    Opens the database connection before the request and guarantees it is
    closed afterwards. Fix vs. original: the close was skipped entirely if
    *func* raised, leaking the connection — now done in a finally block.
    """
    DB.connect()
    logging.info("[Connection] to database: established")
    try:
        # Process the request
        response = func(*args, **kwargs)
    finally:
        DB.close()
        logging.info("[Connection] to database: closed")
    return response
def get_csv_events():
    """Export available, active and historical events as one CSV download."""
    db = DB()
    db.connect()
    available = db.get_available_mon_events()
    active_mon = db.get_active_mon_events()
    active_reach = db.get_active_reachability_events()
    hist_mon = db.get_all_h_mon_events()
    hist_reach = db.get_all_h_reach_events()
    db.close()

    def _stamp(t):
        # Uniform timestamp rendering used by every dated column.
        return t.strftime('%Y-%m-%d %H:%M:%S')

    parts = ['Available Monitoring Events\n',
             'Event Type ID,Event Description,Monitor\n']
    for row in available:
        parts.append('{0},{1},{2}\n'.format(
            row['name'], row['description'], row['monitor']))

    parts += ['\n\nActive Monitoring Events\n',
              'Event ID,Event Type,Resource,Start Time\n']
    for row in active_mon:
        parts.append('{0},{1},{2},{3}\n'.format(
            row['event_id'], row['name'], row['identifier'],
            _stamp(row['start_time'])))

    parts += ['\n\nActive Reachability Events\n',
              'Event ID,Device Name,IP Address,Start Time\n']
    for row in active_reach:
        parts.append('{0},{1},{2},{3}\n'.format(
            row['event_id'], row['name'], row['ip'],
            _stamp(row['start_time'])))

    parts += ['\n\nHistorical Monitoring Events\n',
              'Event ID,Event Type,Resource,Start Time,Stop Time\n']
    for row in hist_mon:
        parts.append('{0},{1},{2},{3},{4}\n'.format(
            row[0], row[1], row[2], _stamp(row[3]), _stamp(row[4])))

    parts += ['\n\nHistorical Reachability Events\n',
              'Event ID,Device Name,IP Address,Start Time,Stop Time\n']
    for row in hist_reach:
        parts.append('{0},{1},{2},{3},{4}\n'.format(
            row[0], row[1], row[2], _stamp(row[3]), _stamp(row[4])))

    return Response(''.join(parts), mimetype="text/csv",
                    headers={"Content-disposition":
                             "attachment; filename=events_report.csv"})
def edit_mon_device(device_id):
    """Edit a monitored device's name, IP and monitor flag.

    GET renders the edit form pre-populated from the DB row; POST either
    cancels, or (after WTForms validation) writes only the fields that
    actually differ from the stored values.

    NOTE(review): pg is never closed on any path — confirm whether the DB
    class cleans up elsewhere, or add pg.close() like dashboard() does.
    """
    if not session.get('logged_in'):
        return render_template('login.html')
    # get data from postgres
    pg = DB()
    pg.connect()
    output = pg.get_device_info()
    datums = pg.get_device_datums()
    monitored_device = pg.get_monitored_device(device_id)
    update = {}  # only the columns whose submitted value differs go in here
    form = ValidateEditMonDevice()
    if request.method == 'POST':
        if request.form['submit_button'] == 'Cancel':
            return redirect(url_for('dashboard'))
        elif request.form['submit_button'] == 'Save':
            if form.validate_on_submit():
                # Diff each editable field against the stored row so the
                # UPDATE touches only what changed.
                if request.form.get('name') != monitored_device['name']:
                    update['identifier'] = request.form.get('name')
                    # print "need to update", request.form['name']
                if request.form.get('ip') != monitored_device['ip']:
                    update['target_ip_address'] = request.form.get('ip')
                    # print "need to update", request.form['ip']
                if request.form.get('monitored'):
                    monitor = request.form.get('monitored')
                    if monitor != monitored_device['monitor']:
                        update['monitor'] = request.form.get('monitored')
                        # print "need to update", request.form.get('monitored')
                if update:
                    pg.update_mon_device(update, device_id)
                    flash('Monitored Device {0} Updated'.format(device_id))
                else:
                    flash('Monitored Device: No Changes Detected')
                return redirect(url_for('dashboard'))
            # Validation failed: re-render the form with field errors.
            return render_template('edit_monitored_devices.html',
                                   output=output, datums=datums,
                                   split_line=split_line, to_mbps=to_mbps,
                                   mem_to_g=mem_to_g,
                                   monitored_device=monitored_device,
                                   form=form)
    else:
        # Plain GET: show the edit form.
        return render_template('edit_monitored_devices.html',
                               output=output, datums=datums,
                               split_line=split_line, to_mbps=to_mbps,
                               mem_to_g=mem_to_g,
                               monitored_device=monitored_device,
                               form=form)
class DBThread(Thread):
    """Consumer thread that drains a queue of tweet dicts into the database.

    A ``None`` item on the queue is the shutdown sentinel. Control messages
    from the stream ("delete", "limit", "disconnect", ...) are logged; real
    tweets (having both "id" and "text") are persisted via DB.add_tweet.

    Fixes vs. original: sentinel test uses ``is None`` instead of ``== None``,
    and the bare ``except:`` around add_tweet is narrowed to ``Exception`` so
    SystemExit/KeyboardInterrupt are no longer swallowed.
    """

    def __init__(self, queue):
        Thread.__init__(self, name="Thread-DB")
        self.queue = queue  # inbound queue shared with the producer
        self.db = DB()
        self.logit = logging.getLogger("logit")
        self.to_commit = Queue()  # staging area for batched commits (unused, see TODO below)
        self.commit_size = 1

    def run(self):
        self.db.connect()
        while True:
            tweet = self.queue.get()
            if tweet is None:
                self.logit.info("Terminal sentinel encountered")
                self.queue.task_done()
                break
            if "delete" in tweet:
                self.logit.info("delete: %s" % str(tweet))
            elif "scrub_geo" in tweet:
                self.logit.info("scrub_geo: %s" % str(tweet))
            elif "limit" in tweet:
                self.logit.warning("limit: %s" % str(tweet))
            elif "status_withheld" in tweet:
                self.logit.info("status_withheld: %s" % str(tweet))
            elif "user_withheld" in tweet:
                self.logit.info("user_withheld: %s" % str(tweet))
            elif "disconnect" in tweet:
                # NOTE(review): this break skips task_done() for the
                # disconnect item, exactly as the original did — a
                # queue.join() elsewhere would hang on it. Confirm intent.
                self.logit.warning("disconnect: %s" % str(tweet))
                break
            elif "warning" in tweet:
                self.logit.warning("warning: %s" % str(tweet))
            elif "id" in tweet and "text" in tweet:
                try:
                    self.db.add_tweet(tweet)
                except Exception:
                    self.logit.exception("db add_tweet exception %s" % str(tweet))
                # TODO: Get multi-queries working, so we don't need a super low-latency connection to the database
                # self.to_commit.put(tweet)
                # Commit self.commit_size tweets at once
                # if self.to_commit.qsize() % 100 == 0:
                #     self.logit.info('to_commit size: %d' % self.to_commit.qsize())
                # if self.to_commit.qsize() >= self.commit_size:
                #     self.logit.info('start committing')
                #     self.db.add_tweets(self.to_commit, self.logit)
                #     self.logit.info('end committing')
            self.queue.task_done()
        self.logit.info("Closing db connection")
        self.db.close()
def main():
    """Smoke test: look up the stored password row for user 'jmillan'.

    Python 2 code (print statements) — do not run under Python 3 as-is.
    """
    pg = DB()
    pg.connect()
    data = pg.get_password('jmillan')
    if data:
        # Row found: first column is the stored password value
        # (presumably the bcrypt hash — see validate()/hash_pwd()).
        print data[0]
    else:
        print "user not found"
    pg.close()
def events():
    """Render the events page; on POST, persist changed monitor flags.

    Fixes vs. original:
    - the zip loop variable was named ``event_list``, shadowing the list being
      zipped, and the single flash after the loop reported the *last iterated*
      event name even when a different event (or several) had changed; now
      each actually-updated event is flashed by name.
    - the DB connection is closed on every path (the original never closed it).
    """
    if not session.get('logged_in'):
        return render_template('login.html')
    # Get data from Postgres
    pg = DB()
    pg.connect()
    monitoring = pg.get_all_h_mon_events()
    reachability = pg.get_all_h_reach_events()
    output = pg.get_device_info()
    datums = pg.get_device_datums()
    monitored_devices = pg.get_monitored_devices()
    active_mon_events = pg.get_active_mon_events()
    active_reachability_events = pg.get_active_reachability_events()
    interface_status = pg.get_interface_status()
    available_mon_events = pg.get_available_mon_events()
    if request.method == 'POST':
        selected = request.form.getlist('monitored')
        # Rows carry 'name' and 'monitor' keys (same schema used by the CSV
        # export); build the two columns in matching order.
        event_names = [event['name'] for event in available_mon_events]
        event_monitor = [event['monitor'] for event in available_mon_events]
        changed = []
        for answer_form, db_data, event_name in zip(selected, event_monitor,
                                                    event_names):
            if answer_form != db_data:
                pg.update_mon_event(event_name, answer_form)
                changed.append(event_name)
        pg.close()
        for name in changed:
            flash("{0} Monitoring Event Updated".format(name))
        return redirect(url_for('events'))
    pg.close()
    return render_template(
        'events.html', monitoring=monitoring, reachability=reachability,
        output=output, datums=datums, mem_to_g=mem_to_g,
        split_line=split_line, to_mbps=to_mbps,
        monitored_devices=monitored_devices,
        active_mon_events=active_mon_events,
        active_reachability_events=active_reachability_events,
        interface_status=interface_status,
        available_mon_events=available_mon_events)
def validate(self, username, pwd):
    """Check *pwd* against the stored bcrypt hash for *username*.

    Returns True on a match, False for a wrong password or unknown user.
    Fixes vs. original: the DB connection is now closed, and the nested
    if/else returning True/False is collapsed to a direct comparison.
    """
    pg = DB()
    pg.connect()
    pwd_db = pg.get_password(username)
    pg.close()  # fix: original left the connection open
    if not pwd_db:
        return False
    # bcrypt convention: hashing the candidate with the stored hash as the
    # salt reproduces the stored hash only when the password matches.
    return hashpw(pwd, pwd_db[0]) == pwd_db[0]
def add_mon_device():
    """Add a device to the monitored-devices list.

    GET renders the add form; POST either cancels or, after WTForms
    validation, inserts whichever fields were supplied and redirects to the
    dashboard.

    NOTE(review): pg is never closed on any path — confirm whether the DB
    class cleans up elsewhere, or add pg.close() like dashboard() does.
    """
    if not session.get('logged_in'):
        return render_template('login.html')
    # get data from postgres
    pg = DB()
    pg.connect()
    output = pg.get_device_info()
    datums = pg.get_device_datums()
    device = {}  # only fields the user actually filled in are inserted
    form = ValidateMonDevice()
    if request.method == 'POST':
        if request.form['submit_button'] == "Cancel":
            return redirect(url_for('dashboard'))
        elif request.form['submit_button'] == "Save":
            # print form.validate_on_submit()
            if form.validate_on_submit():
                if form.id.data:
                    device['target_device_id'] = form.id.data
                if form.ip.data:
                    device['target_ip_address'] = form.ip.data
                if form.name.data:
                    device['identifier'] = form.name.data
                # Checkbox arrives via raw form data, not the WTForms object.
                if request.form.get('monitored'):
                    device['monitor'] = request.form.get('monitored')
                if device:
                    pg.insert_mon_device(
                        device)  # we send data to postgres here
                    flash("Monitored Device {0} Added".format(form.name.data))
                return redirect(url_for('dashboard'))
            # Validation failed: re-render the form with field errors.
            return render_template('add_monitored_device.html',
                                   output=output, datums=datums,
                                   split_line=split_line, to_mbps=to_mbps,
                                   mem_to_g=mem_to_g, form=form)
    else:
        # Plain GET: show the add form.
        return render_template('add_monitored_device.html',
                               output=output, datums=datums,
                               split_line=split_line, to_mbps=to_mbps,
                               mem_to_g=mem_to_g, form=form)
def install_scraper(scraper):
    """Register *scraper* in the scrapers table unless it is already there."""
    DB.connect()
    existing = DB.query("SELECT scraper_id FROM scrapers WHERE service=?",
                        [scraper.service])
    if existing:
        return
    # New scraper: flag the UI so it can react to the addition.
    kodi.set_property("new_scraper", "true", 'script.module.scrapecore')
    # Build the settings XML fragment, one indented line per entry, then
    # substitute the scraper's identity into the placeholders.
    settings_definition = ''.join("\n\t\t" + s
                                  for s in scraper.settings_definition)
    settings_definition = (settings_definition
                           .replace("{NAME}", scraper.name)
                           .replace("{SERVICE}", scraper.service))
    DB.execute(
        "INSERT INTO scrapers(service, name, settings, enabled) VALUES(?,?,?,1)",
        [scraper.service, scraper.name, settings_definition])
    DB.commit()
def main(conf_paths=confPaths):
    """Main function of the program.

    It loads all the interfaces and modules described on the config files.
    Python 2 code (uses the old ConfigParser module name).
    """
    localedir = os.path.join(os.path.dirname(__file__), 'locale')
    conf = ConfigParser.RawConfigParser()
    conf.read(conf_paths)
    # Logging and i18n must be up before anything else logs or translates.
    _init_log(conf)
    _init_i18n(conf, localedir)
    db = DB()
    db.connect(conf.get("core", "db_host"),
               int(conf.get("core", "db_port")),
               conf.get("core", "db_name"))
    core = db.core()
    _load_complements(conf)
    _restore_sched(core)
    # Keep the process alive; all real work happens in the loaded complements.
    while 1:
        sleep(1000)  # I didn't find any better wait method
def charts():
    """Render the memory-utilization and load-average chart page.

    Fix vs. original: the DB connection is now closed after the queries
    (the original never called pg.close()).
    """
    if not session.get('logged_in'):
        return render_template('login.html')
    pg = DB()
    pg.connect()
    output = pg.get_device_info()
    datums = pg.get_device_datums()
    data_memory = pg.get_mem_utilized()
    load_avg = pg.get_load_avg()
    pg.close()  # fix: connection was never closed
    # Chart series: values and matching timestamp labels, in row order.
    values = [mem_to_meg(line[0]) for line in data_memory]
    labels = [line[1].strftime('%Y-%m-%d %H:%M:%S') for line in data_memory]
    values2 = [line[0] for line in load_avg]
    labels2 = [line[1].strftime('%Y-%m-%d %H:%M:%S') for line in load_avg]
    if datums:
        legend = 'Memory Used (Memory Available {0}) Megs'.format(
            mem_to_meg(datums['mem']))
    else:
        legend = 'Memory Used (0) Megs'
    legend2 = "Load Average"
    return render_template('charts.html', output=output, datums=datums,
                           mem_to_meg=mem_to_meg, split_line=split_line,
                           values=values, labels=labels, legend=legend,
                           values2=values2, labels2=labels2, legend2=legend2)
def edit_device_info():
    """Edit the device's own record (status, network, address, contact).

    GET renders the form; POST either cancels or, after WTForms validation,
    diffs every editable field against the stored row and updates only the
    changed columns, flashing the list of field names that were written.

    NOTE(review): pg is never closed on any path — confirm whether the DB
    class cleans up elsewhere, or add pg.close() like dashboard() does.
    """
    if not session.get('logged_in'):
        return render_template('login.html')
    # get data from postgres
    pg = DB()
    pg.connect()
    output = pg.get_device_info()
    datums = pg.get_device_datums()
    device = {}  # only the columns that changed go in here
    form = ValidateDeviceRecords()
    if request.method == 'POST':
        if request.form['submit_button'] == 'Cancel':
            return redirect(url_for('dashboard'))
        elif request.form['submit_button'] == 'Save':
            items_updated = ""  # human-readable list for the flash message
            if form.validate_on_submit():
                # Field-by-field diff; the rate fields are compared against
                # str() of the stored value because form data arrives as text.
                if request.form.get('status') != output['status']:
                    device['status'] = request.form.get('status')
                    items_updated += ' Status '
                if form.ip.data != output['ip']:
                    device['ip_address'] = form.ip.data
                    items_updated += ' IP Address '
                if form.download.data.strip() != str(output['download_rate']):
                    device['download_rate'] = form.download.data
                    items_updated += ' Download Rate '
                if form.upload.data != str(output['upload_rate']):
                    device['upload_rate'] = form.upload.data
                    items_updated += ' Upload Rate '
                if form.address.data != output['address']:
                    device['address'] = form.address.data
                    items_updated += ' Address '
                if form.city.data != output['city']:
                    device['city'] = form.city.data
                    items_updated += ' City '
                if form.state.data != output['state']:
                    device['state'] = form.state.data
                    items_updated += ' State '
                if form.country.data != output['country']:
                    device['country'] = form.country.data
                    items_updated += ' Country '
                if form.zip.data != output['zip']:
                    device['zip'] = form.zip.data
                    items_updated += ' Zip Code '
                if form.phone.data != output['phone']:
                    device['phone'] = form.phone.data
                    items_updated += ' Phone '
                if form.name.data != output['name']:
                    device['name'] = form.name.data
                    items_updated += ' Name '
                if device:
                    pg.update_device(device)  # we send data to postgres here
                    flash("Device Info Updated: " + items_updated)
                else:
                    flash("Device Info: No Changes Detected")
                return redirect(url_for('dashboard'))
            # Validation failed: re-render the form with field errors.
            return render_template('edit_device_info.html',
                                   output=output, datums=datums,
                                   split_line=split_line, to_mbps=to_mbps,
                                   mem_to_g=mem_to_g,
                                   form=form)
    else:
        # Plain GET: show the edit form.
        return render_template('edit_device_info.html',
                               output=output, datums=datums,
                               split_line=split_line, to_mbps=to_mbps,
                               mem_to_g=mem_to_g, form=form)
def hash_pwd(self, username, pwd):
    """Hash *pwd* with a fresh bcrypt salt and store it for *username*."""
    salted_hash = hashpw(pwd, gensalt())
    db = DB()
    db.connect()
    db.add_user(username, salted_hash)
    db.close()
def main():
    """Bot entry point: initialize the DB, register handlers, start polling."""
    # Create the database and its necessary tables at startup
    DB.connect()
    DB.create_tables([User])
    # Create the Updater and pass it your bot's token.
    updater = Updater(os.getenv('BOT_TOKEN'))
    # Get the dispatcher to register handlers
    dp = updater.dispatcher

    def stop_and_restart():
        # Stop polling, then replace this process with a fresh copy of itself.
        updater.stop()
        os.execl(sys.executable, sys.executable, *sys.argv)

    @restricted
    def restart(bot, update):
        # Admin-only: acknowledge first, restart in a thread so the reply
        # can be delivered before the process is replaced.
        update.message.reply_text('Bot is restarting...')
        Thread(target=stop_and_restart).start()

    # Command handlers.
    dp.add_handler(CommandHandler('start', start, pass_args=True))
    dp.add_handler(CommandHandler('help', help_me))
    dp.add_handler(CommandHandler('issues', issues_comm))
    dp.add_handler(CommandHandler('tips', tips))
    dp.add_handler(
        CommandHandler('settings', get_settings, pass_user_data=True))
    # Free-text messages.
    dp.add_handler(
        MessageHandler(Filters.text, text_messages, pass_job_queue=True,
                       pass_user_data=True))
    # Inline-keyboard callbacks; pattern-specific handlers must be registered
    # before the catch-all CallbackQueryHandler below.
    dp.add_handler(
        CallbackQueryHandler(search_callback, pattern='^search:',
                             pass_user_data=True, pass_job_queue=True))
    dp.add_handler(
        CallbackQueryHandler(setting_callbacks, pattern='^set:',
                             pass_job_queue=True))
    dp.add_handler(CallbackQueryHandler(change_settings, pattern='^_set:'))
    dp.add_handler(
        CallbackQueryHandler(callbacks, pass_job_queue=True,
                             pass_user_data=True))
    dp.add_handler(
        CommandHandler('jobs', manage_jobs, pass_job_queue=True,
                       pass_args=True))
    dp.add_handler(CommandHandler('r', restart))
    # log all errors
    dp.add_error_handler(error)
    updater.start_polling()
    updater.idle()
def fetch_choices(): """ Fetch data for CHOICES This script is run in the init phase of server. All DB data needed to fill and render templates properly are fetched here and displayed. """ # define choices source choices = dict() choices['ph_product_name'] = list() choices['columns'] = list() choices['compare'] = list() # populate 'ph_product_name' choices DB.connect() ph_pn_query = SkuEntry.select(SkuEntry.ph_product_name).distinct() DB.close() choices['ph_product_name'] = [(r.ph_product_name, r.ph_product_name) for r in ph_pn_query] choices['ph_product_name'] = sorted(choices['ph_product_name']) choices['ph_product_name'].insert(0, ("all", "ALL Products")) # populate 'columns' choices list excluded = () choices['columns'] = list() # take each column and if its not in excluded tuple, add it to list for c in SkuEntry._meta.sorted_field_names: if c not in excluded: item = { 'id': c, 'name': SkuEntry._meta.fields[c].verbose_name, 'type': SkuEntry._meta.fields[c].db_field } choices['columns'].append(item) # define all available operations over the data in database operators = { "equals": lambda a, b: a == b, "does not equal": lambda a, b: a != b, "contains": lambda a, b: a.contains(b), "does not contain": lambda a, b: ~(a.contains(b)), "greater than": lambda a, b: (a > b) & (a >= 0), "less then": lambda a, b: (a < b) & (a >= 0), "empty or not applicable": lambda a: (a == -1) | (a.is_null()), "applicable": lambda a: ~((a == -1) | (a.is_null())), "unlimited": lambda a: a == -2 } # specify valid (and desired and in order) operators for given datatype applicable = dict() applicable["string"] = ("equals", "contains", "does not contain", "empty or not applicable", "applicable") applicable["int"] = ("equals", "does not equal", "greater than", "less then", "empty or not applicable", "applicable", "unlimited") applicable["bool"] = ("equals", ) # populate choices for compare choices['compare'] = dict() for dtype, ops in applicable.items(): choices['compare'][dtype] = 
OrderedDict([(o, operators[o]) for o in ops]) # set a special tuple of compare operators where search value is not # applicable and where we don't want to show the value input field in the # template choices['compare_no_value'] = ("empty or not applicable", "applicable", "unlimited") # simple lookup table for data type mapping choices['field_type'] = {'int': int, 'string': unicode, 'bool': bool} return choices
res = { 'InsPageLink': IG_PROFILE + username, 'InsPageName': username, 'BioText': instagram_profile.biography, 'FolowerAtUpdate': instagram_profile.followers, 'FolowingAtUpdate': instagram_profile.following, 'PostCount': instagram_profile.posts, 'SiteLink': instagram_profile.external_url, 'Check': True, } return res # Database setUp. mydb = DB() mydb.connect() mydb.select_database('instagram_test') with alive_bar(4325) as bar: with open('usernames.txt') as f: username_list = f.readlines() for acc in username_list: acc = acc.strip() ig_uni_key = IG_PROFILE + acc mydb.refinstagram_col() filter = {'InsPageLink': ig_uni_key} status = mydb.find_one(filter) try: if status['Check'] == True:
def main(argv):
    """(list of strs) -> None

    Only takes the sys.argv list. Uses this for parsing command line commands.
    Adding files to the database happens FIRST.
    $ python3 testlabs.py -FLAG ARGUMENT -FLAG ARGUMENT

    All flags      Purpose
    a    | Autodetect. Scans for unadded files and adds them automatically.
    avg  | Averages. Plots segmental averages using a constant size.
    sum  | Drops segment calculated values.
    c    | Smoothing. Plot x_name, smoothed_y. Only works from Times series.
         |   -x sub flag requires -dat
    d    | Directory. Import a directory into the database instead of one file.
    dat  | Data. Indicates the data to be used for some flags.
    f    | File. Put one specific file into the database.
    g    | Generate tables.
    j    | Join. Stitches all quarters together into one VERY colorful graph.
    p    | Plot. x_name,y_name. Also requires the -s flag.
    s    | Series. Used for correct select series data. Time or DV.
    sbf  | show_best_fit. Calls this function. That's it.
    x    | Segmentation. Does segment based things. This should be a leading flag.
    o    | Dump. Dumps data from the database into a csv.
    t    | Trim. Uses a +/-5 percent band for data trimming & plotting.
    zm   | Zoom. Zooms into a section of a graph. Uses '-r start,end'.

    Fixes vs. original:
    - the '-f' branch referenced an undefined name ``item`` (NameError on any
      -f run); the directory and filename are now derived from the argument.
    - top-level '-sbf' with '-q' passed ``argv.index('-q')`` (the flag's
      position) as the quarter instead of the value following the flag.
    """
    basebase = DB('postgres', 'cutler', host='localhost', user='******',
                  password='******')
    basebase.connect()
    basebase.cur_gen()
    # Check for DB add flags
    if '-a' in argv:
        pass
    elif '-g' in argv:
        basebase.create_table('./generate_tables.sql')
        print('TABLES CREATED')
    elif '-drop' in argv:
        basebase.execute('DROP TABLE dv_data;')
        basebase.execute('DROP TABLE dv_defaults;')
        basebase.execute('DROP TABLE time_data;')
        basebase.execute('DROP TABLE time_defaults;')
        basebase.execute('DROP TABLE files;')
    elif '-d' in argv:
        index = argv.index('-d') + 1
        # Check if it's a directory
        if isdir(argv[index]):
            # Needs a trailing /
            if argv[index][-1] != '/':
                # Create one
                argv[index] = argv[index] + '/'
                print('+Be sure directories have a trailing /')
            # Loop through items in directory
            for item in listdir(argv[index]):
                if ('dvt' not in item) and (isfile(argv[index] + item)) and \
                        ('kplr' in item) and ('llc_lc.tbl' in item):
                    into_db_timeseries(basebase, argv[index], item)
                elif ('dvt' in item) and (isfile(argv[index] + item)) and \
                        ('kplr' in item):
                    into_db_dvseries(basebase, argv[index], item)
        else:
            print('ERROR: NOT A DIRECTORY')
    elif '-f' in argv:
        index = argv.index('-f') + 1
        # only absolute pathing only
        if '/' not in argv[index]:
            print('+For relative pathing please use "./"')
            exit()
        if '/' == argv[index][-1]:
            print('+Remove trailing slash on -f argument')
            exit()
        # Check if it's a file
        if isfile(argv[index]):
            # FIX: split the path into directory + filename; the original
            # used an undefined variable `item` here.
            directory, item = argv[index].rsplit('/', 1)
            directory += '/'
            if ('dvt' not in item) and ('kplr' in item):
                into_db_timeseries(basebase, directory, item)
            elif ('dvt' in item) and ('kplr' in item):
                into_db_dvseries(basebase, directory, item)
    else:
        print('WARNING: NO DATABASE FLAGS DETECTED')
    # Check for function flags
    if '-x' in argv:
        # Segmentation flags
        if '-q' in argv:
            quarter = argv[argv.index('-q') + 1]
        else:
            quarter = None
        if '-t' in argv:
            trim_segments(basebase)
        elif '-o' in argv:
            columns = argv[argv.index('-o') + 1].split(',')
            rip_to_local(basebase, columns, quarter)  # ColumnNames
        elif '-sbf' in argv:
            seg_best_fit(basebase,
                         argv[argv.index('-sbf') + 1].split(','), quarter)
        elif '-j' in argv:
            seg_stitch(basebase, argv[argv.index('-j') + 1].split(','),
                       quarter)
        elif '-sum' in argv:
            generate_summary(basebase,
                             argv[argv.index('-sum') + 1].split(','))
        elif '-avg' in argv:
            generate_segment_averages(basebase,
                                      argv[argv.index('-avg') + 1].split(','))
        elif '-c' in argv:
            index = argv.index('-c')
            if '-dat' not in argv:
                print('ERROR: DATA NOT SELECTED FOR SMOOTHING')
            elif len(argv[argv.index('-dat') + 1].split(',')) != 3:
                print('ERROR: INVALID NUMBER OF -dat ARGUMENTS\nPLEASE USE: 3')
                print(argv[argv.index('-dat') + 1].split(','))
            elif argv[index + 1] == 'sqr':
                # Square Smooth
                square_smooth(basebase,
                              argv[argv.index('-dat') + 1].split(','))
            elif argv[index + 1] == 'tri':
                # Triangular Smooth
                triangular_smooth(basebase,
                                  argv[argv.index('-dat') + 1].split(','))
            elif argv[index + 1] == 'sav':
                # Savitzky-Golay Smooting
                pass
            elif argv[index + 1] == 'all':
                # Run all three
                square_smooth(basebase,
                              argv[argv.index('-dat') + 1].split(','))
                triangular_smooth(basebase,
                                  argv[argv.index('-dat') + 1].split(','))
            else:
                print('ERROR: NO SMOOTHING TYPE DETECTED')
        else:
            print('ERROR: NO SEGMENTATION ACTIONS DETECTED')
    elif '-p' in argv:
        if '-s' in argv:
            index = argv.index('-p') + 1
            if ',' not in argv[index]:
                print('+Plot flag improperly formatted')
                exit()
            plot_items = argv[index].split(',')
            index = argv.index('-s') + 1
            series_type = argv[index]
            if '-q' in argv:
                index = argv.index('-q') + 1
                quarter = argv[index]
            else:
                quarter = None
            pull_n_graph(basebase, plot_items[0], plot_items[1], series_type,
                         quarter=quarter)
        else:
            print('ERROR: CANNOT PLOT BECAUSE NO -s FLAG DETECTED')
            exit()
    elif '-sbf' in argv:
        if '-q' in argv:
            # FIX: pass the quarter *value*; the original passed the list
            # index of the '-q' flag itself.
            show_best_fit(basebase, 'cadenceno', 'sap_flux', 'time',
                          argv[argv.index('-q') + 1])
            # This is where the show_best_fit function is run
        else:
            for item in range(17):
                show_best_fit(basebase, 'cadenceno', 'sap_flux', 'time',
                              str(item + 1))
            # stitching()
    elif '-j' in argv:
        if '-s' in argv:
            x, y = argv[argv.index('-j') + 1].split(',')
            stitching(basebase, x, y, argv[argv.index('-s') + 1])
        else:
            print('ERROR: CANNOT PLOT BECAUSE NO -s FLAG DETECTED')
    elif '-zm' in argv:
        x, y = argv[argv.index('-zm') + 1].split(',')
        if '-r' in argv:
            start, end = argv[argv.index('-r') + 1].split(',')
            zoom(basebase, x, y, start, end)
        else:
            print('ERROR: NO -r FLAG DETECTED!')
    elif '-s' in argv:
        print('ERROR: CANNOT PLOT WITHOUT A PROPER -p OR -j FLAG')
    else:
        print('WARNING: NO FUNCTION FLAGS DETECTED')