def execute(sql, params, req, funcs=None, binary=False):
    cur = DB()
    asjson = True
    if 'format' in req.args and req.args['format'] == 'table':
        asjson = False
    args = {}
    for p in params:
        if p in req.args and req.args[p] == '':
            return error(
                ' | '.join([k + '=' + v for k, v in req.args.iteritems()]) +
                ' is not a valid input', asjson)
        val = ''
        if p in req.args:
            val = req.args[p]
        if funcs and p in funcs:
            val = funcs[p](val)
        args[p] = val
    try:
        cur.execute(sql, [args[x] for x in params])
        if binary:
            return success(asjson)
        else:
            return results(cur.fetchall(), cur.description, asjson)
    except Exception as e:
        return error(str(e), asjson)
def loves_movie(userId, movieId):
    cur = DB()
    # check that the userId is a digit
    if not userId.isdigit():
        return error(str(userId) + ' is not a valid userId')
    # check that the movieId is a digit
    if not movieId.isdigit():
        return error(str(movieId) + ' is not a valid movieId')
    cur.execute(
        """SELECT * FROM Views v WHERE v.UserID = %s AND v.MovieID = %s""",
        [userId, movieId])
    results = cur.fetchall()
    if len(results) > 0:
        return error('User already liked this movie')
    else:
        # insert into the same Views table the SELECT above checked
        cur.execute("""INSERT INTO Views (UserID, MovieID) VALUES(%s, %s)""",
                    [userId, movieId])
        return Response(json.dumps({'msg': 'Success'}),
                        status=200, mimetype='application/json')
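# A hedged alternative sketch for the check-then-insert above: assuming a
# MySQL backend, a UNIQUE key on (UserID, MovieID), and that the DB() wrapper
# exposes a cursor-style rowcount, INSERT IGNORE makes the operation atomic
# and removes the race between the SELECT and the INSERT.
def loves_movie_atomic(userId, movieId):
    cur = DB()
    if not userId.isdigit() or not movieId.isdigit():
        return error('userId and movieId must be numeric')
    cur.execute(
        """INSERT IGNORE INTO Views (UserID, MovieID) VALUES (%s, %s)""",
        [userId, movieId])
    # rowcount is 0 when the row already existed (the IGNORE case)
    if cur.rowcount == 0:
        return error('User already liked this movie')
    return Response(json.dumps({'msg': 'Success'}),
                    status=200, mimetype='application/json')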
def find(query, components):
    conn = DB.getConn()
    c = conn.cursor()
    c.execute(query, components)
    commitrows = c.fetchall()
    commitfiles = []
    if commitrows:
        allcommitids = ",".join([str(int(commit[0])) for commit in commitrows])
        # This is poor practice, but we assured ourselves the value is
        # composed only of ints first
        DB.execute(c, "SELECT * from " + DB.commitfile._table +
                   " WHERE commitid IN (" + allcommitids + ")")
        commitfiles = c.fetchall()
        DB.execute(c, "SELECT * from " + DB.commitkeyword._table +
                   " WHERE commitid IN (" + allcommitids + ")")
        commitkeywords = c.fetchall()
    commits = []
    for i in commitrows:
        r = Repo()
        r.loadFromValues(i[DB.commit._numColumns + 0],
                         i[DB.commit._numColumns + 1],
                         i[DB.commit._numColumns + 2],
                         i[DB.commit._numColumns + 3],
                         i[DB.commit._numColumns + 4],
                         i[DB.commit._numColumns + 5])
        files = [file[DB.commitfile.file] for file in commitfiles
                 if file[DB.commitfile.commitid] == i[DB.commit.id]]
        keywords = [keyword[DB.commitkeyword.keyword] for keyword in commitkeywords
                    if keyword[DB.commitkeyword.commitid] == i[DB.commit.id]]
        c = Commit()
        c.loadFromDatabase(r, i, files, keywords)
        commits.append(c)
    return commits
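# The "poor practice" the comment above admits to (string-concatenating commit
# ids into an IN (...) clause) can be avoided entirely. A minimal sketch,
# assuming the %s paramstyle used elsewhere in this module: build one
# placeholder per id and bind the ids as real query parameters.
def fetch_commit_files(c, commitids):
    if not commitids:
        return []
    placeholders = ",".join(["%s"] * len(commitids))
    DB.execute(c,
               "SELECT * FROM " + DB.commitfile._table +
               " WHERE commitid IN (" + placeholders + ")",
               commitids)
    return c.fetchall()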
def saveinDB(data):
    # Connect to MySQL database
    db = DB(config.db['host'], config.db['database'],
            config.db['user'], config.db['password'])
    # The table name comes from trusted config and cannot be bound as a query
    # parameter, so it is interpolated separately; the row values are bound as
    # parameters instead of being %-formatted into the SQL string (this
    # assumes the DB wrapper forwards params to cursor.execute).
    sql = ("INSERT INTO `%s`(`time`, `t1`, `t2`, `t3`, `p1`, `relais`, "
           "`flags`, `errors`, `rt1`) "
           "VALUES (NULL, %%s, %%s, %%s, %%s, %%s, %%s, %%s, %%s)"
           % config.db['table_resol'])
    db.execute(sql, (data['temp1'], data['temp2'], data['temp3'],
                     data['pump1'], data['relais'], data['flags'],
                     data['errors'], data['r1time']))
def logTerms(ip, keywords):
    insertSQL = "INSERT INTO " + DB.searchqueries._table + \
                "(timestamp, ip, terms) " + \
                "VALUES(%s, INET_ATON(%s), %s) "
    conn = DB.getConn()
    c = conn.cursor()
    DB.execute(c, insertSQL, (int(time.time()), ip, keywords))
    conn.commit()
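# Companion sketch for logTerms (hypothetical helper, not part of the
# original module): since the ip is stored with INET_ATON, it has to be read
# back with INET_NTOA. Assumes the same MySQL backend and DB helpers.
def recentTerms(limit=20):
    conn = DB.getConn()
    c = conn.cursor()
    DB.execute(c,
               "SELECT timestamp, INET_NTOA(ip), terms FROM " +
               DB.searchqueries._table +
               " ORDER BY timestamp DESC LIMIT %s", (limit,))
    return c.fetchall()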
def orders_tickets(userId, movieId):
    cur = DB()
    # check that the userId is a digit
    if not userId.isdigit():
        return error(str(userId) + ' is not a valid userId')
    # check that the movieId is a digit
    if not movieId.isdigit():
        return error(str(movieId) + ' is not a valid movieId')
    # `Order` is a reserved word in MySQL, so it is backquoted;
    # datetime.now() supplies the actual order timestamp
    cur.execute(
        """INSERT INTO `Order` (Amount, DateTime, UserID, MovieID)
           VALUES (10, %s, %s, %s)""",
        [datetime.now(), userId, movieId])
    return Response(json.dumps({'msg': 'Success'}),
                    status=200, mimetype='application/json')
def install_scraper(scraper):
    DB.connect()
    scraper_id = DB.query("SELECT scraper_id FROM scrapers WHERE service=?",
                          [scraper.service])
    if not scraper_id:
        kodi.set_property("new_scraper", "true", 'script.module.scrapecore')
        settings_definition = ''
        for s in scraper.settings_definition:
            settings_definition += "\n\t\t" + s
        settings_definition = settings_definition.replace("{NAME}", scraper.name)
        settings_definition = settings_definition.replace("{SERVICE}", scraper.service)
        DB.execute(
            "INSERT INTO scrapers(service, name, settings, enabled) VALUES(?,?,?,1)",
            [scraper.service, scraper.name, settings_definition])
        DB.commit()
def findByIDs(project, uniqueid):
    conn = DB.getConn()
    c = conn.cursor()
    getcommitsSQL = "SELECT c.*, r.* " + \
                    "FROM " + DB.commit._table + " c " + \
                    "INNER JOIN " + DB.repo._table + " r " + \
                    "  ON r.id = c.repoid "
    whereClause = " 1=1 "
    components = []
    if project and uniqueid:
        whereClause += "AND r.tagname = %s AND c.uniqueid = %s "
        components = [project, uniqueid]
    getcommitsSQL += "WHERE " + whereClause
    getcommitsSQL += "ORDER BY c.date DESC "
    DB.execute(c, getcommitsSQL, components)
    commitrows = c.fetchall()
    commitfiles = []
    if commitrows:
        allcommitids = ",".join([str(int(commit[0])) for commit in commitrows])
        # This is poor practice, but we assured ourselves the value is
        # composed only of ints first
        DB.execute(c, "SELECT * from " + DB.commitfile._table +
                   " WHERE commitid IN (" + allcommitids + ")")
        commitfiles = c.fetchall()
    commits = []
    for i in commitrows:
        r = Repo()
        r.loadFromValues(i[DB.commit._numColumns + 0],
                         i[DB.commit._numColumns + 1],
                         i[DB.commit._numColumns + 2],
                         i[DB.commit._numColumns + 3],
                         i[DB.commit._numColumns + 4],
                         i[DB.commit._numColumns + 5])
        files = [file[DB.commitfile.file] for file in commitfiles
                 if file[DB.commitfile.commitid] == i[DB.commit.id]]
        c = Commit()
        c.loadFromDatabase(r, i, files)
        commits.append(c)
    return commits
def taskc():
    cur = DB()
    asjson = True
    if 'format' in request.args and request.args['format'] == 'table':
        asjson = False
    for p in cp:
        if p in request.args and request.args[p] == '':
            return error(
                ' | '.join([k + '=' + v for k, v in request.args.iteritems()]) +
                ' is not a valid input', asjson)
    try:
        cur.execute(c1, [request.args['Amount'],
                         datetime.now(),
                         request.args['UserID'],
                         request.args['MovieID']])
        confNum = cur.lastrowid
        cur.execute(c2, [confNum] + [request.args[x] for x in cp[3:]])
        return results([[confNum]], [['ConfirmationNumber']], asjson)
    except Exception as e:
        return error(str(e), asjson)
def test(userId=None):
    cur = DB()
    # check that the input is present and is a digit
    if userId is None or not userId.isdigit():
        return error(str(userId) + ' is not a valid userId')
    # parameterized query: the driver escapes userId for us
    cur.execute("""SELECT * FROM User u WHERE u.UserID = %s;""", [userId])
    results = cur.fetchall()  # list of matching rows
    print(results)
    if len(results) >= 1:
        # user found
        row = results[0]
        return render_template('test.html',
                               user={'first': row[1],
                                     'last': row[2],
                                     'email': row[3]})
    else:
        return error('User does not exist')
def find(query, components):
    conn = DB.getConn()
    c = conn.cursor()
    c.execute(query, components)
    commitrows = c.fetchall()
    commitfiles = []
    if commitrows:
        allcommitids = ",".join([str(int(commit[0])) for commit in commitrows])
        # This is poor practice, but we assured ourselves the value is
        # composed only of ints first
        DB.execute(c, "SELECT * from " + DB.commitfile._table +
                   " WHERE commitid IN (" + allcommitids + ")")
        commitfiles = c.fetchall()
        DB.execute(c, "SELECT * from " + DB.commitkeyword._table +
                   " WHERE commitid IN (" + allcommitids + ")")
        commitkeywords = c.fetchall()
        DB.execute(c, "SELECT commitid, case when length(data) < 307200 "
                   "then data else 'TOOLARGE' end as data from " +
                   DB.commitdiffs._table +
                   " WHERE commitid IN (" + allcommitids + ")")
        commitdata = c.fetchall()
    commits = []
    for i in commitrows:
        r = Repo()
        r.loadFromValues(i[DB.commit._numColumns + DB.repo.id],
                         i[DB.commit._numColumns + DB.repo.name],
                         i[DB.commit._numColumns + DB.repo.repotypeid],
                         i[DB.commit._numColumns + DB.repo.url],
                         i[DB.commit._numColumns + DB.repo.viewlink],
                         i[DB.commit._numColumns + DB.repo.tagname],
                         i[DB.commit._numColumns + DB.repo.tagmaturity])
        files = [file[DB.commitfile.file] for file in commitfiles
                 if file[DB.commitfile.commitid] == i[DB.commit.id]]
        keywords = [keyword[DB.commitkeyword.keyword] for keyword in commitkeywords
                    if keyword[DB.commitkeyword.commitid] == i[DB.commit.id]]
        data = [cdata[DB.commitdiffs.data] for cdata in commitdata
                if cdata[DB.commitdiffs.commitid] == i[DB.commit.id]][0]
        if i[DB.commit._numColumns + DB.repo.repotypeid] == Repo.Type.GIT:
            c = GitCommit()
        elif i[DB.commit._numColumns + DB.repo.repotypeid] == Repo.Type.SVN:
            c = SVNCommit()
        else:
            c = Commit()
        c.loadFromDatabase(r, i, files, keywords, data)
        commits.append(c)
    return commits
def save_source(self, addon_id, source):
    # REPLACE INTO inserts the row, or overwrites an existing row with the
    # same addon_id key
    DB.execute("REPLACE INTO install_history(addon_id, source) VALUES(?,?)",
               [addon_id, json.dumps(source)])
    DB.commit()
import os
import sys
import time
from random import randint

from database import DB

db = DB()

# @app.teardown_appcontext
# def close_connection(exception):
#     db.close_connection(exception)

if __name__ == '__main__':
    db.execute('''DROP TABLE IF EXISTS items''')
    db.execute('''DROP TABLE IF EXISTS stats''')
    db.execute(
        '''CREATE TABLE items(TableID INTEGER PRIMARY KEY, ItemID text,
           Name text, Descriptor text, ShortUrl text, LongUrl text)'''
    )
    db.execute('''CREATE TABLE stats(TableID INTEGER, CreatedAt INTEGER)''')
    items = [
        ('BHL-001', 'T-Shirt', 'Super Item', 'rndmstrng', 'http://nytimes.com'),
        ('BHL-002', 'Book', 'Super Books', 'tneoius', 'http://twitter.com'),
        ('BHL-003', 'Also Something', 'Super super super', 'shrtrl', 'http://short.com'),
    ]
    db.executemany(
        repotypeid tinyint NOT NULL,
        url varchar(255) NOT NULL UNIQUE,
        viewlink varchar(512) NULL,
        tagname varchar(30) NOT NULL,
        maturity varchar(20) NOT NULL
        ) COLLATE utf8_general_ci ENGINE=innodb;
        """
    c.execute(sql)

if args.testpopulate:
    print 'Populating Repos...'
    sql = "INSERT INTO " + DB.repo._table + """(repotypeid, name, url, viewlink, tagname, maturity)
        SELECT 2, 'Git Test Cases', 'git://github.com/tomrittervg/Code-Audit-Feed-Test-Cases.git', 'https://github.com/tomrittervg/Code-Audit-Feed-Test-Cases/commit/%ID', 'testcases-git', 'development'
        UNION
        SELECT 1, 'Subversion Test Cases', 'http://code-audit-feed-testcases.googlecode.com/svn/trunk/', 'http://code.google.com/p/code-audit-feed-testcases/source/detail?r=%ID', 'testcases-svn', 'development'
        """
    DB.execute(c, sql)
elif args.populate:
    print 'Populating Repos...'
    sql = "INSERT INTO " + DB.repo._table + """(repotypeid, name, url, viewlink, tagname, maturity)
        SELECT 2, 'Crypto.is Docs', 'https://github.com/cryptodotis/crypto.is-docs', 'https://github.com/cryptodotis/crypto.is-docs/commit/%ID', 'crypto.is-docs', 'beta' UNION
        SELECT 2, 'Convergence', 'https://github.com/moxie0/Convergence.git', 'https://github.com/moxie0/Convergence/commit/%ID', 'convergence', 'beta' UNION
        SELECT 2, 'Obfuscated OpenSSH', 'https://github.com/brl/obfuscated-openssh', 'https://github.com/brl/obfuscated-openssh/commit/%ID', 'obfuscated-openssh', 'stable' UNION
        SELECT 1, 'Phantom', 'http://phantom.googlecode.com/svn/trunk/', 'http://code.google.com/p/phantom/source/detail?r=%ID', 'phantom', 'development' UNION
        SELECT 8, 'Corkscrew', 'http://www.agroman.net/corkscrew/', NULL, 'corkscrew', 'development' UNION
        SELECT 9, 'Tahoe-LAFS', 'http://tahoe-lafs.org/source/tahoe-lafs/trunk/', 'http://tahoe-lafs.org/trac/tahoe-lafs/changeset?old_path=%2Ftrunk&old=%ID&new_path=%2Ftrunk&new=%ID', 'tahoe-lafs', 'beta' UNION
        SELECT 2, 'Briar Prototype', 'git://briar.git.sourceforge.net/gitroot/briar/prototype', 'http://briar.git.sourceforge.net/git/gitweb.cgi?p=briar/prototype;a=commitdiff;h=%ID', 'briar', 'development' UNION
        SELECT 2, 'Briar Docs', 'git://briar.git.sourceforge.net/gitroot/briar/docs', 'http://briar.git.sourceforge.net/git/gitweb.cgi?p=briar/docs;a=commitdiff;h=%ID', 'briar-docs', 'development' UNION
        SELECT 2, 'Metadata Anonymization Toolkit', 'https://git.torproject.org/user/jvoisin/mat.git', 'https://gitweb.torproject.org/user/jvoisin/mat.git/commitdiff/%ID', 'mat', 'beta' UNION
        SELECT 2, 'Encounter', 'https://github.com/secYOUre/Encounter.git', 'https://github.com/secYOUre/Encounter/commit/%ID', 'encounter', 'development' UNION
        SELECT 2, 'Batphone', 'https://github.com/servalproject/batphone.git', 'https://github.com/servalproject/batphone/commit/%ID', 'serval', 'development' UNION
        SELECT 8, 'Haveged', 'http://www.issihosts.com/haveged/', '', 'haveged', 'beta' UNION"""
def save(self):
    if not self.initialized:
        raise Exception("called save on uninitialized Commit object")
    conn = DB.getConn()
    c = conn.cursor()

    sql = "INSERT INTO " + DB.commit._table + """(repoid, date, message, uniqueid)
             VALUES(%s, %s, %s, %s)
             ON DUPLICATE KEY UPDATE uniqueid = VALUES(uniqueid)"""
    c.execute(sql, (self.repo.id, self.date, self.message, self.uniqueid))
    if self.commitid <= 0:
        self.commitid = conn.insert_id()

    data = self.getChangedTexts(None)
    data = cPickle.dumps(data, cPickle.HIGHEST_PROTOCOL)
    data = zlib.compress(data)
    sql = "DELETE FROM " + DB.commitdiffs._table + " WHERE commitid = " + str(self.commitid)
    c.execute(sql)
    sql = "INSERT INTO " + DB.commitdiffs._table + "(commitid, data) "
    sql += "VALUES(" + str(self.commitid) + ", %s)"
    c.execute(sql, [data])

    if self.files:
        sql = "DELETE FROM " + DB.commitfile._table + " WHERE commitid = " + str(self.commitid)
        c.execute(sql)
        sql = "INSERT INTO " + DB.commitfile._table + "(commitid, file) "
        for f in self.files:
            sql += "SELECT " + str(self.commitid) + ", %s UNION "
        sql = sql[:-6]  # strip the trailing " UNION "
        c.execute(sql, self.files)

    if self.dbkeywords:
        sql = "DELETE FROM " + DB.commitkeyword._table + " WHERE commitid = " + str(self.commitid)
        c.execute(sql)
        sql = "INSERT INTO " + DB.commitkeyword._table + "(commitid, keyword) "
        for f in self.dbkeywords:
            sql += "SELECT " + str(self.commitid) + ", %s UNION "
        sql = sql[:-6]  # strip the trailing " UNION "
        c.execute(sql, [x for x in self.dbkeywords])

    sql = "DELETE FROM " + DB.commitwordmap._table + " WHERE commitid = " + str(self.commitid)
    c.execute(sql)

    data = self.getChangedTexts(None)
    data.append(self.message.lower())
    data.extend([f.lower() for f in self.files])
    data = [punctuation.sub(' ', d) for d in data]

    allwords = set()
    for d in data:
        dWords = d.split()
        dTotalIndex = 0
        while dTotalIndex < len(dWords):
            words = []
            dThisIndex = 0
            sql = "INSERT INTO " + DB.commitwordmap._table + "(commitid, word) "
            # batch at most 500 words per INSERT; index by the overall
            # position so later batches see the words past the first 500
            while dThisIndex < 500 and dTotalIndex < len(dWords):
                w = dWords[dTotalIndex][:50]
                if len(w) > 2 and w not in allwords:
                    sql += "SELECT " + str(self.commitid) + ", %s UNION "
                    words.append(w)
                    allwords.add(w)
                dThisIndex += 1
                dTotalIndex += 1
            sql = sql[:-6]
            if words:
                DB.execute(c, sql, words)

    conn.commit()
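# The UNION-of-SELECTs batch inserts above can be expressed more simply with
# executemany. A sketch, assuming a DB-API cursor such as MySQLdb's, where
# executemany binds one parameterized INSERT per row; shown here for the
# commitfile table only.
def save_files(c, commitid, files):
    sql = ("INSERT INTO " + DB.commitfile._table +
           "(commitid, file) VALUES (%s, %s)")
    c.executemany(sql, [(commitid, f) for f in files])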
def toggle_scraper(scraper_id):
    # ABS(1-enabled) flips the flag: 0 -> 1 and 1 -> 0
    DB.execute("UPDATE scrapers SET enabled=ABS(1-enabled) WHERE scraper_id=?",
               [scraper_id])
    DB.commit()
def delete_scraper(service):
    DB.execute("DELETE FROM scrapers WHERE service=?", [service])
    DB.commit()
    write_settings_file()
# otr
sql += """
    SELECT 6, 'http://bazaar.launchpad.net/~afflux/python-otr/purepython', NULL, 'python-otr', 'beta' UNION"""
# browser plugins
sql += """
    SELECT 2, 'https://github.com/RC1140/cr-gpg.git', 'https://github.com/RC1140/cr-gpg/commit/%ID', 'cr-gpg', 'development' UNION"""
# mailinglist
sql += """
    SELECT 1, 'https://sels.svn.sourceforge.net/svnroot/sels', NULL, 'sels', 'development' UNION
    SELECT 8, 'http://non-gnu.uvt.nl/pub/mailman/', NULL, 'secure-list-server', 'development' UNION
    SELECT 2, 'git://git.immerda.ch/schleuder.git', NULL, 'schleuder', 'development' UNION
    SELECT 8, 'http://www.synacklabs.net/projects/crypt-ml/', NULL, 'crypt-ml', 'development' UNION
    SELECT 3, 'shibboleth.cvs.sourceforge.net', NULL, 'shibboleth', 'development' UNION
    SELECT 3, 'mmreencrypt.cvs.sourceforge.net', NULL, 'mmreencrypt', 'development'"""
DB.execute(c, sql)

# --------------------------------------------- keyword table
if args.keywords:
    try:
        c.execute("DROP TABLE " + DB.keyword._table)
    except:
        pass
    c.execute("SHOW TABLES LIKE '" + DB.keyword._table + "'")
    r = c.fetchone()
    if r:
        print "Keyword Table Exists"
    else:
        print "Creating Keyword Table..."
        sql = "CREATE TABLE " + DB.keyword._table + """ (
def main(argv):
    """(list of strs) -> None

    Only takes the sys.argv list. Uses this for parsing command line
    commands. Adding files to the database happens FIRST.

    $ python3 testlabs.py -FLAG ARGUMENT -FLAG ARGUMENT

    All flags   Purpose
    a         | Autodetect. Scans for unadded files and adds them automatically.
    avg       | Averages. Plots segmental averages using a constant size.
    sum       | Drops segment calculated values.
    c         | Smoothing. Plot x_name, smoothed_y. Only works from Time
              | series. -x sub flag requires -dat
    d         | Directory. This indicates you want to import a directory into
              | the database instead of just one file
    dat       | Data. Indicates the data to be used for some flags.
    f         | File. Put one specific file into the database
    g         | Generate tables
    j         | Join. Stitches all quarters together into one VERY colorful graph
    p         | Plot. x_name,y_name. Also requires the -s flag
    s         | Series. Used to select the correct series data. Time or DV.
    sbf       | show_best_fit. Calls this function. That's it.
    x         | Segmentation. Does segment based things. This should be a
              | leading flag.
    o         | Dump. Dumps data from the database into a csv.
    t         | Trim. Uses a +/-5 percent band for data trimming & plotting
    zm        | Zoom. Zooms into a section of a graph. Uses '-r start,end' to
              | function.
    """
    basebase = DB('postgres', 'cutler', host='localhost',
                  user='******', password='******')
    basebase.connect()
    basebase.cur_gen()

    # Check for DB add flags
    if '-a' in argv:
        pass
    elif '-g' in argv:
        basebase.create_table('./generate_tables.sql')
        print('TABLES CREATED')
    elif '-drop' in argv:
        basebase.execute('DROP TABLE dv_data;')
        basebase.execute('DROP TABLE dv_defaults;')
        basebase.execute('DROP TABLE time_data;')
        basebase.execute('DROP TABLE time_defaults;')
        basebase.execute('DROP TABLE files;')
    elif '-d' in argv:
        index = argv.index('-d') + 1
        # Check if it's a directory
        if isdir(argv[index]):
            # Needs a trailing /
            if argv[index][-1] != '/':
                # Create one
                argv[index] = argv[index] + '/'
                print('+Be sure directories have a trailing /')
            # Loop through items in directory
            for item in listdir(argv[index]):
                if ('dvt' not in item) and (isfile(argv[index] + item)) and \
                        ('kplr' in item) and ('llc_lc.tbl' in item):
                    into_db_timeseries(basebase, argv[index], item)
                elif ('dvt' in item) and (isfile(argv[index] + item)) and \
                        ('kplr' in item):
                    into_db_dvseries(basebase, argv[index], item)
        else:
            print('ERROR: NOT A DIRECTORY')
    elif '-f' in argv:
        index = argv.index('-f') + 1
        # absolute paths only
        if '/' not in argv[index]:
            print('+For relative pathing please use "./"')
            exit()
        if '/' == argv[index][-1]:
            print('+Remove trailing slash on -f argument')
            exit()
        # Check if it's a file
        if isfile(argv[index]):
            # derive (directory, filename) from the path, which is the shape
            # the loaders expect in the -d branch
            directory, item = argv[index].rsplit('/', 1)
            directory += '/'
            if ('dvt' not in item) and ('kplr' in item):
                into_db_timeseries(basebase, directory, item)
            elif ('dvt' in item) and ('kplr' in item):
                into_db_dvseries(basebase, directory, item)
    else:
        print('WARNING: NO DATABASE FLAGS DETECTED')

    # Check for function flags
    if '-x' in argv:
        # Segmentation flags
        if '-q' in argv:
            quarter = argv[argv.index('-q') + 1]
        else:
            quarter = None
        if '-t' in argv:
            trim_segments(basebase)
        elif '-o' in argv:
            columns = argv[argv.index('-o') + 1].split(',')  # ColumnNames
            rip_to_local(basebase, columns, quarter)
        elif '-sbf' in argv:
            seg_best_fit(basebase, argv[argv.index('-sbf') + 1].split(','), quarter)
        elif '-j' in argv:
            seg_stitch(basebase, argv[argv.index('-j') + 1].split(','), quarter)
        elif '-sum' in argv:
            generate_summary(basebase, argv[argv.index('-sum') + 1].split(','))
        elif '-avg' in argv:
            generate_segment_averages(basebase, argv[argv.index('-avg') + 1].split(','))
        elif '-c' in argv:
            index = argv.index('-c')
            if '-dat' not in argv:
                print('ERROR: DATA NOT SELECTED FOR SMOOTHING')
            elif len(argv[argv.index('-dat') + 1].split(',')) != 3:
                print('ERROR: INVALID NUMBER OF -dat ARGUMENTS\nPLEASE USE: 3')
                print(argv[argv.index('-dat') + 1].split(','))
            elif argv[index + 1] == 'sqr':
                # Square Smooth
                square_smooth(basebase, argv[argv.index('-dat') + 1].split(','))
            elif argv[index + 1] == 'tri':
                # Triangular Smooth
                triangular_smooth(basebase, argv[argv.index('-dat') + 1].split(','))
            elif argv[index + 1] == 'sav':
                # Savitzky-Golay Smoothing
                pass
            elif argv[index + 1] == 'all':
                # Run all three
                square_smooth(basebase, argv[argv.index('-dat') + 1].split(','))
                triangular_smooth(basebase, argv[argv.index('-dat') + 1].split(','))
            else:
                print('ERROR: NO SMOOTHING TYPE DETECTED')
        else:
            print('ERROR: NO SEGMENTATION ACTIONS DETECTED')
    elif '-p' in argv:
        if '-s' in argv:
            index = argv.index('-p') + 1
            if ',' not in argv[index]:
                print('+Plot flag improperly formatted')
                exit()
            plot_items = argv[index].split(',')
            index = argv.index('-s') + 1
            series_type = argv[index]
            if '-q' in argv:
                index = argv.index('-q') + 1
                quarter = argv[index]
            else:
                quarter = None
            pull_n_graph(basebase, plot_items[0], plot_items[1], series_type,
                         quarter=quarter)
        else:
            print('ERROR: CANNOT PLOT BECAUSE NO -s FLAG DETECTED')
            exit()
    elif '-sbf' in argv:
        if '-q' in argv:
            # This is where the show_best_fit function is run; pass the
            # quarter value that follows -q, not its position in argv
            show_best_fit(basebase, 'cadenceno', 'sap_flux', 'time',
                          argv[argv.index('-q') + 1])
        else:
            for item in range(17):
                show_best_fit(basebase, 'cadenceno', 'sap_flux', 'time',
                              str(item + 1))
        # stitching()
    elif '-j' in argv:
        if '-s' in argv:
            x, y = argv[argv.index('-j') + 1].split(',')
            stitching(basebase, x, y, argv[argv.index('-s') + 1])
        else:
            print('ERROR: CANNOT PLOT BECAUSE NO -s FLAG DETECTED')
    elif '-zm' in argv:
        x, y = argv[argv.index('-zm') + 1].split(',')
        if '-r' in argv:
            start, end = argv[argv.index('-r') + 1].split(',')
            zoom(basebase, x, y, start, end)
        else:
            print('ERROR: NO -r FLAG DETECTED!')
    elif '-s' in argv:
        print('ERROR: CANNOT PLOT WITHOUT A PROPER -p OR -j FLAG')
    else:
        print('WARNING: NO FUNCTION FLAGS DETECTED')
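# Hypothetical example invocations for the flag table above (file, column,
# and argument values are illustrative, not taken from the project):
#   $ python3 testlabs.py -d ./kepler_data/
#   $ python3 testlabs.py -p time,sap_flux -s Time -q 3
#   $ python3 testlabs.py -x -avg sap_flux,128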