def signIn(self, username, password):
    """Authenticate a user and render the home page with all thread names.

    On success renders home.html with the thread list; on a bad username or
    password writes an error message instead.
    """
    try:
        if (os.getenv('SERVER_SOFTWARE') and
                os.getenv('SERVER_SOFTWARE').startswith('Google App Engine/')):
            db = MySQLdb.connect(unix_socket='/cloudsql/' + _INSTANCE_NAME,
                                 db='DATABASE', user='******')
        else:
            db = MySQLdb.connect(host='HOSTIP', port=3306, db='DATABASE',
                                 user='******', passwd='PASSWORD')
        try:
            cursor = db.cursor()
            # BUGFIX: fetch only the matching user with a parameterized
            # equality query (args as a tuple) instead of comparing every
            # returned row in Python.
            cursor.execute(
                'SELECT username,password FROM user where username = %s',
                (username,))
            row = cursor.fetchone()
            # NOTE(review): passwords appear to be stored and compared in
            # plaintext — they should be salted and hashed; confirm schema.
            if row is not None and row[1] == password:
                cursor.execute('SELECT thread_name FROM thread')
                threadlist = [r[0] for r in cursor.fetchall()]
                variables = {'usernameSaved': username,
                             'threadlist': threadlist}
                template = JINJA_ENVIRONMENT.get_template('home.html')
                self.response.write(template.render(variables))
            else:
                # BUGFIX: an unknown username previously produced no
                # response at all; now it gets the same error as a bad
                # password.
                self.response.write('Please Enter Correct details')
        finally:
            # BUGFIX: the connection leaked on auth failure or exception.
            db.close()
    except DeadlineExceededError as error_message:
        logging.exception('Failed, exception happened - %s' % error_message)
        self.response.write('<br />')
def RealMain(argv, data=None):
    # Pull every entity kind from the remote BackupService, chunk by chunk,
    # and save each entity into a local PostgreSQL store, committing after
    # each chunk.  `data` is accepted but unused here — TODO confirm callers.
    os.environ['LC_ALL'] = 'C'  # force a stable locale for child tooling
    options, args = parser.parse_args(argv[1:])
    srv = GetRpcServer(options)
    backup = Proxy(BackupService_Stub(srv))
    db = PgSQL.connect(database=options.dbname, client_encoding="utf-8", unicode_results=1)
    db.cursor().execute("set client_encoding to unicode")
    store = LocalStore(db)
    print 'BEGIN BACKUP'
    for kind_name in KINDS:
        sys.stdout.write('\n')
        cnt = 0
        last_key = ''  # resume cursor: server returns entities after this key
        while True:
            sys.stdout.write('\r%-18s ... ' % kind_name)
            r = NextChunkRequest()
            r.kind = kind_name
            r.last_key = last_key
            r = backup.NextChunk(r)
            if not r.entity:
                break  # no more entities of this kind
            for entity in r.entity:
                cnt += 1
                sys.stdout.write('\r%-18s ... %5d ' % (kind_name, cnt))
                # NOTE(review): unpickling bytes received over RPC — only
                # safe if the backup server is fully trusted.
                o = pickle.load(cStringIO.StringIO(entity.data))
                # Dispatch to LocalStore.save_<kind>(entity, obj).
                getattr(store, 'save_%s' % kind_name)(entity, o)
                last_key = entity.key
            db.commit()  # persist per chunk so a crash loses at most one chunk
        sys.stdout.write('\n')
    print 'BACKUP DONE'
    db.commit()
    db.close()
def get(self):
    """Serve every row of testapp_greeting as a JSON document."""
    cursor = db.cursor()
    cursor.execute('SELECT * from testapp_greeting;')
    # Columns used: 0 = id, 2 = content, 3 = date (a date/datetime object).
    data = [
        {'id': row[0], 'content': row[2], 'date': row[3].isoformat()}
        for row in cursor.fetchall()
    ]
    self.response.out.write(json.dumps({'data': data}))
def write_file(self, fileName, name):
    """Upload the posted image to Cloud Storage and create a thread row.

    Writes the image publicly to `fileName`, inserts a (username, image URL,
    thread name) row, then re-renders home.html with the full thread list.
    """
    if (os.getenv('SERVER_SOFTWARE') and
            os.getenv('SERVER_SOFTWARE').startswith('Google App Engine/')):
        db = MySQLdb.connect(unix_socket='/cloudsql/' + _INSTANCE_NAME,
                             db='DATABASE', user='******')
    else:
        db = MySQLdb.connect(host='HOSTIP', port=3306, db='DATABASE',
                             user='******', passwd='PASSWORD')
    cursor = db.cursor()
    threadname = cgi.escape(self.request.get('threadNm'))  # thread name from form
    username = self.request.get('usernameHd')  # logged-in user (hidden field)
    img_img = self.request.get("image")  # raw uploaded image bytes
    write_retry_params = gcs.RetryParams(backoff_factor=1.1)
    gcs_file = gcs.open(fileName, 'w',
                        content_type='image/jpg',
                        retry_params=write_retry_params,
                        options={'x-goog-acl': 'public-read'})
    gcs_file.write(img_img)
    gcs_file.close()
    # Public URL where the uploaded image will live.
    url = 'https://console.developers.google.com/m/cloudstorage/b/cloudbucket12/o/' + name
    try:
        # Insert the new thread (parameterized query).
        cursor.execute(
            'INSERT INTO thread (username,imageref,thread_name) VALUES (%s,%s,%s)',
            (username, url, threadname))
    except MySQLdb.IntegrityError:
        # BUGFIX: was a bare `except:` that swallowed every error (including
        # programming errors); only a duplicate key is expected here.
        self.response.write('Thread Already Exists')
    db.commit()
    cursor.execute('SELECT thread_name FROM thread')
    threadlist = [row1[0] for row1 in cursor.fetchall()]
    db.close()
    self.tmp_filenames_to_clean_up.append(fileName)
    variables = {'threadlist': threadlist,
                 'usernameSaved': username,
                 'threadname': threadname}
    template = JINJA_ENVIRONMENT.get_template('home.html')
    self.response.write(template.render(variables))
def write_file(self, fileName, name):
    """Upload the posted image to Cloud Storage and create a thread row.

    Writes the image publicly to `fileName`, inserts a (username, image URL,
    thread name) row, then re-renders home.html with the full thread list.
    """
    if (os.getenv('SERVER_SOFTWARE') and
            os.getenv('SERVER_SOFTWARE').startswith('Google App Engine/')):
        db = MySQLdb.connect(unix_socket='/cloudsql/' + _INSTANCE_NAME,
                             db='DATABASE', user='******')
    else:
        db = MySQLdb.connect(host='HOSTIP', port=3306, db='DATABASE',
                             user='******', passwd='PASSWORD')
    cursor = db.cursor()
    threadname = cgi.escape(self.request.get('threadNm'))  # thread name from form
    username = self.request.get('usernameHd')  # logged-in user (hidden field)
    img_img = self.request.get("image")  # raw uploaded image bytes
    write_retry_params = gcs.RetryParams(backoff_factor=1.1)
    gcs_file = gcs.open(fileName, 'w',
                        content_type='image/jpg',
                        retry_params=write_retry_params,
                        options={'x-goog-acl': 'public-read'})
    gcs_file.write(img_img)
    gcs_file.close()
    # URL of where the image is going to be uploaded (placeholder path).
    url = 'Path of googleCloud Bucket' + name
    try:
        # Insert the new thread (parameterized query).
        cursor.execute(
            'INSERT INTO thread (username,imageref,thread_name) VALUES (%s,%s,%s)',
            (username, url, threadname))
    except MySQLdb.IntegrityError:
        # BUGFIX: was a bare `except:` that swallowed every error (including
        # programming errors); only a duplicate key is expected here.
        self.response.write('Thread Already Exists')
    db.commit()
    cursor.execute('SELECT thread_name FROM thread')
    threadlist = [row1[0] for row1 in cursor.fetchall()]
    db.close()
    self.tmp_filenames_to_clean_up.append(fileName)
    variables = {'threadlist': threadlist,
                 'usernameSaved': username,
                 'threadname': threadname}
    template = JINJA_ENVIRONMENT.get_template('home.html')
    self.response.write(template.render(variables))
def get(self, item_id):
    """Serve the testapp_greeting row with the given id as JSON.

    Responds with {'err': ...} when item_id is not a valid integer.
    """
    try:
        int(item_id)  # validate before touching the database
        cursor = db.cursor()
        # BUGFIX: the query was built with `%` string interpolation; use a
        # parameterized query instead.
        cursor.execute('SELECT * FROM testapp_greeting WHERE id=%s;',
                       (item_id,))
        data = [
            {'id': row[0], 'content': row[2], 'date': row[3].isoformat()}
            for row in cursor.fetchall()
        ]
        result = {'data': data, 'item_id': item_id}
    except ValueError:
        # BUGFIX: was a bare `except:` that also masked database errors, and
        # the '%s' placeholder was never actually formatted with item_id.
        result = {'err': '%s incorrectly formatted' % item_id}
    self.response.out.write(json.dumps(result))
def post(self):
    """Delete a thread by name, then re-render home.html with the rest."""
    threadname = self.request.get('threadnametorm')
    loggedUserName = self.request.get('usernameLoggedIn')
    if (os.getenv('SERVER_SOFTWARE') and
            os.getenv('SERVER_SOFTWARE').startswith('Google App Engine/')):
        db = MySQLdb.connect(unix_socket='/cloudsql/' + _INSTANCE_NAME,
                             db='DATABASE', user='******')
    else:
        db = MySQLdb.connect(host='HOSTIP', port=3306, db='DATABASE',
                             user='******', passwd='PASSWORD')
    cursor = db.cursor()
    # BUGFIX: the DELETE was built with `%` string interpolation over a
    # request parameter (SQL injection); use a parameterized query.
    # NOTE(review): there is no check that loggedUserName owns the thread —
    # any user can delete any thread; confirm whether that is intended.
    cursor.execute('DELETE FROM thread WHERE thread_name = %s',
                   (threadname.strip(),))
    db.commit()
    cursor.execute('SELECT thread_name FROM thread')
    threadlist = [row1[0] for row1 in cursor.fetchall()]
    db.close()
    variables = {'threadlist': threadlist,
                 'usernameSaved': loggedUserName,
                 'threadname': threadname}
    template = JINJA_ENVIRONMENT.get_template('home.html')
    self.response.write(template.render(variables))
def get(self):
    """Debug view: dump sample rows from the AWARE tables as plain text."""
    self.response.headers['Content-Type'] = 'text/plain'
    if not (os.getenv('SERVER_SOFTWARE') and
            os.getenv('SERVER_SOFTWARE').startswith('Google App Engine/')):
        self.response.write('Need to connect from Google Appspot')
        return
    db = MySQLdb.connect(unix_socket='/cloudsql/' + _INSTANCE_NAME,
                         db=_DB, user='******')
    cursor = db.cursor()

    def dump(query):
        # Run `query` and write one line per result row.
        cursor.execute(query)
        for row in cursor.fetchall():
            self.response.write('%s\n' % str(row))

    dump('SHOW TABLES')
    for table in ('mqtt_messages',
                  'plugin_google_activity_recognition',
                  'plugin_mode_of_transportation',
                  'locations'):
        dump('SELECT * FROM %s LIMIT 200' % table)
    # Days that have location data (header is written after the query runs,
    # matching the original output order).
    cursor.execute("SELECT from_unixtime(locations.timestamp/1000,'%Y-%m-%d') as day_with_data, count(*) as records FROM locations GROUP by day_with_data;")
    self.response.write("#Days with data from location data")
    for row in cursor.fetchall():
        self.response.write('%s\n' % str(row))
    # Elapsed time per recognized activity per day.
    self.response.write("#activity per day")
    dump("SELECT FROM_UNIXTIME(timestamp/1000,'%Y-%m-%d') as day, activity_name, (max(timestamp)-min(timestamp))/1000 as time_elapsed_seconds FROM plugin_google_activity_recognition GROUP BY day, activity_name, FROM_UNIXTIME(timestamp/1000,'%Y-%m-%d %H:%m');")
    db.close()
def signUp(self, username, password, cnpassword):
    """Create a new user account and render the sign-up page.

    Rejects the request when the confirmation password does not match.
    """
    try:
        # BUGFIX: cnpassword was accepted but never compared against
        # password; reject a mismatch before touching the database.
        if password != cnpassword:
            self.response.write('Passwords do not match')
            return
        if (os.getenv('SERVER_SOFTWARE') and
                os.getenv('SERVER_SOFTWARE').startswith('Google App Engine/')):
            db = MySQLdb.connect(unix_socket='/cloudsql/' + _INSTANCE_NAME,
                                 db='DATABASE', user='******')
        else:
            db = MySQLdb.connect(host='HOSTIP', port=3306, db='DATABASE',
                                 user='******', passwd='PASSWORD')
        try:
            cursor = db.cursor()
            # NOTE(review): the password is stored in plaintext — it should
            # be salted and hashed; confirm the schema before changing.
            cursor.execute('INSERT INTO user (username,password) VALUES (%s,%s)',
                           (username, password))
            # BUGFIX: commit before rendering so the insert is durable even
            # if template rendering fails.
            db.commit()
            self.response.write('<br />')
            template = JINJA_ENVIRONMENT.get_template('sign_up.html')
            self.response.write(template.render())
        finally:
            db.close()
    except DeadlineExceededError as error_message:
        logging.exception('Failed, exception happened - %s' % error_message)
        self.response.write('<br />')
def post(self):
    """Render a single thread page: its image plus the parsed comment list."""
    # Form fields posted from home.html.
    threadname = self.request.get('threadnameIdHidden')
    loggedUserName = self.request.get('loggedUserId')
    if (os.getenv('SERVER_SOFTWARE') and
            os.getenv('SERVER_SOFTWARE').startswith('Google App Engine/')):
        db = MySQLdb.connect(unix_socket='/cloudsql/' + _INSTANCE_NAME,
                             db='DATABASE', user='******')
    else:
        db = MySQLdb.connect(host='HOSTIP', port=3306, db='DATABASE',
                             user='******', passwd='PASSWORD')
    cursor = db.cursor()
    comments = ''
    imageref = ''
    userName = ''
    # Retrieve the thread row.  BUGFIX: query args are passed as a tuple,
    # not a bare string.  Row layout: 0 = username, 1 = imageref,
    # 2 = thread_name, 3 = comments blob.
    cursor.execute('SELECT * FROM thread WHERE thread_name = %s',
                   (threadname,))
    for data in cursor.fetchall():
        userName = data[0]
        imageref = data[1]
        comments = data[3]
    # BUGFIX: the connection was never closed in the original.
    db.close()
    # Comments are stored as one string:
    #   user1 + 'cmnet' + text1 + 'ed=ed' + user2 + 'cmnet' + text2 + ...
    # BUGFIX: local renamed from `list`, which shadowed the builtin.
    comment_list = []
    if comments is not None:
        for item in comments.split("ed=ed"):
            comment_list.append(item.split('cmnet'))
    # The remove button is only active for the thread's owner.
    isRmActive = 'True' if loggedUserName == userName else 'False'
    variables = {'imageLink': imageref,
                 'userName': userName,
                 'commentsList': comment_list,
                 'threadname': threadname,
                 'usernameSaved': loggedUserName,
                 'isRmActive': isRmActive}
    template = JINJA_ENVIRONMENT.get_template('thread.html')
    self.response.write(template.render(variables))
def mysql_cursor():
    """Connect to the local `ijlc` database and return a dict-row cursor."""
    connection = MySQLdb.connect(host='localhost', passwd='bica',
                                 db='ijlc', user='******')
    return connection.cursor(MySQLdb.cursors.DictCursor)
def get(self):
    """default landing page"""
    # Builds a list of {query, results} dicts from the AWARE tables and
    # renders them with index.html.  Only works when running on App Engine
    # (Cloud SQL is reached via the /cloudsql unix socket).
    if (os.getenv('SERVER_SOFTWARE') and os.getenv('SERVER_SOFTWARE').startswith('Google App Engine/')):
        db = MySQLdb.connect(unix_socket='/cloudsql/' + _INSTANCE_NAME, db=_DB, user='******')
        cursor = db.cursor()
        logging.info("making queries")
        # some sample queries that will write examples of the sort of
        # data we have collected to the log so you can get a sense of things
        self.make_and_print_query(cursor, 'SHOW TABLES', 'Show the names of all tables')
        self.make_and_print_query(
            cursor, 'SELECT DISTINCT device_id FROM locations',
            'List all device ids')
        self.make_and_print_query(
            cursor,
            "SELECT * FROM mqtt_messages WHERE device_id = '{0}' LIMIT 10"
            .format(_ID), 'Example contents of mqtt_messages')
        self.make_and_print_query(
            cursor,
            "SELECT * FROM plugin_google_activity_recognition WHERE device_id = '{0}' LIMIT 10 "
            .format(_ID), 'Example contents of plugin_google_activity_recognition')
        self.make_and_print_query(
            cursor,
            "SELECT * FROM locations WHERE device_id = '{0}' LIMIT 10".
            format(_ID), 'Example contents of locations')
        # this query collects information about the number
        # of log enteries for each day.
        day = "FROM_UNIXTIME(timestamp/1000,'%Y-%m-%d')"
        query = "SELECT {0} as day_with_data, count(*) as records FROM {1} WHERE device_id = '{2}' GROUP by day_with_data".format(
            day, _LOCATIONS, _ID)
        rows = self.make_query(cursor, query)
        queries = [{"query": query, "results": rows}]
        # this query lets us collect information about
        # locations that are visited so we can bin them.
        query = "SELECT double_latitude, double_longitude FROM {0} WHERE device_id = '{1}'".format(
            _LOCATIONS, _ID)
        locations = self.make_query(cursor, query)
        #locations = self.make_and_print_query(cursor, query, "locatons")
        # Cluster the raw lat/lon points into bins within _EPSILON of
        # each other.
        bins = self.bin_locations(locations, _EPSILON)
        for location in bins:
            logging.info('%s\n' % str(location))
        queries = queries + [{"query": query, "results": bins}]
        # now get locations organized by day and hour
        time_of_day = "FROM_UNIXTIME(timestamp/1000,'%H')"
        day = "FROM_UNIXTIME(timestamp/1000,'%Y-%m-%d')"
        query = "SELECT {0} as day, {1} as time_of_day, double_latitude, double_longitude FROM {2} WHERE device_id = '{3}' GROUP BY day, time_of_day".format(
            day, time_of_day, _LOCATIONS, _ID)
        locations = self.make_query(cursor, query)
        # and get physical activity per day and hour
        # activity name and duration in seconds
        # NOTE(review): `timestamp/100` below is almost certainly a typo for
        # `timestamp/1000` (every other query divides by 1000) — confirm.
        day_and_time_of_day = "FROM_UNIXTIME(timestamp/100, '%Y-%m-%d %H')"
        elapsed_seconds = "(max(timestamp)-min(timestamp))/1000"
        query = "SELECT {0} as day, {1} as time_of_day, activity_name, {2} as time_elapsed_seconds FROM {3} WHERE device_id = '{4}' GROUP BY day, activity_name, {5}".format(
            day, time_of_day, elapsed_seconds, _ACTIVITY, _ID, day_and_time_of_day)
        activities = self.make_query(cursor, query)
        # now we want to associate activities with locations. This will update the
        # bins list with activities.
        self.group_activities_by_location(bins, locations, activities, _EPSILON)
        db.close()
    else:
        queries = [{
            "query": 'Need to connect from Google Appspot',
            "results": []
        }]
    context = {"queries": queries}
    logging.info("context")
    logging.info(context)
    # and render the response
    self.render_response('index.html', context)
# NOTE(review): the leading `else:` below belongs to a handler whose `if`
# branch lies outside this chunk; it is left untouched.
else: self.response.write('<br />')
def addComment(self):
    # Append a new comment to a thread's serialized comment blob and rebuild
    # the parsed comment list for rendering.
    # NOTE(review): this function appears truncated here — it builds `list`
    # but never writes the updated comments back, renders, or closes `db`;
    # confirm against the full source.
    if (os.getenv('SERVER_SOFTWARE') and os.getenv('SERVER_SOFTWARE').startswith('Google App Engine/')):
        db = MySQLdb.connect(unix_socket='/cloudsql/' + _INSTANCE_NAME, db='DATABASE', user='******')
    else:
        db = MySQLdb.connect(host='HOSTIP', port=3306, db='DATABASE', user='******',passwd='PASSWORD')
    commentsNew=self.request.get('comments')        # new comment text from form
    threadname=self.request.get('threadname')       # which thread to comment on
    loggedUserName=self.request.get('usernameHd')   # author of the comment
    imageref='Nothing'
    userName=''
    comments=''
    cursor = db.cursor()
    # NOTE(review): `list` shadows the builtin, and the query argument is a
    # bare string rather than a tuple — both worth fixing.
    list=[]
    cursor.execute('SELECT * FROM thread WHERE thread_name = (%s)',threadname)
    # Row layout: 0 = username, 1 = imageref, 2 = thread_name, 3 = comments.
    for data in cursor.fetchall():
        thread_name=data[2]
        userName=data[0]
        imageref=data[1]
        comments=data[3]
    if comments is not None:
        comments=comments+'ed=ed'+loggedUserName+'cmnet'+commentsNew
        comments_split=comments.split("ed=ed")#Comments are added in format as username+cmnet+comment+ed=ed+username2+cmnet+comment+ed=ed
        for items in comments_split:
            list.append(items.split('cmnet'))
    else:
        # First comment on this thread.
        comments=loggedUserName+'cmnet'+commentsNew
        list.append(comments.split('cmnet'))
# NOTE(review): blobstore upload handler — parses an uploaded CSV into
# big_dict (key = column index, value = [column name, cell values...]) and
# inserts the rows into MySQL table TABLE_NAME.  Kept verbatim (a string
# literal is split across the original lines and the trailing `else:` is
# truncated in this chunk).  Known issues to fix when the full source is in
# view: (1) `db.rollback` is missing `()` so the rollback never executes;
# (2) TABLE_NAME and cell values are interpolated directly into SQL
# (injection risk) — use parameterized queries; (3) `except Exception, e`
# is Python-2-only syntax; prefer `except Exception as e`.
def post(self): try: upload_files = self.get_uploads('xlfile') TABLE_NAME = self.request.get('table_name') self.response.out.write('Table Name received. Name=%s' % (TABLE_NAME)) self.response.out.write('File upload received. File count=%d' % len(upload_files)) if len(upload_files) > 0: blob_info = upload_files[0] self.response.out.write('Blob stored key=%s' % (blob_info.key())) user_file = UserFile(blob_key=blob_info.key()) user_file.put() blob_reader = blobstore.BlobReader(blob_info.key()) blob_iterator = BlobIterator(blob_reader) reader = csv.reader(blob_iterator, skipinitialspace=True, delimiter=',') #!!!!!AYHUN !!!!! # IMPLEMENT PARSER HERE #object to use: csv iterator called "reader" # Create/Stick into database count = 1 big_dict = {} num_cols = 0 titles = [] for row in reader: #self.response.out.write(count) #self.response.out.write(row) if count == 1: self.response.out.write("Metadata:<br>") num_cols = 0 for elts in row: catg_list = [elts] big_dict[num_cols] = catg_list num_cols += 1 self.response.out.write(elts + ", ") self.response.out.write("<br>Content:") else: iter_num = 0 for elts in row: big_dict[iter_num].append(elts) iter_num += 1 self.response.out.write(elts + ", ") self.response.out.write("<br>") count += 1 self.response.out.write("Final dict:" + str(big_dict)) ###############SQL CODE -----> use big_dict (dictionary object) to generate SQL code #big_dict current format: dictionary object where key is 0-number of cols in csv file #value is list of values in each row, where the first element of the list is the name of column #use this value to create values for the attributes in database and remainder of list to fill the #the data corresponding to the attribute self.response.out.write('Starting database part.............<br>') self.response.out.write(os.getenv('SERVER_SOFTWARE') + "<br>") if (os.getenv('SERVER_SOFTWARE') and os.getenv('SERVER_SOFTWARE').startswith('Google App Engine/')): self.response.out.write("I should be running on the cloud 
right now 1<br>") #db = MySQLdb.connect(unix_socket='/cloudsql/' + _INSTANCE_NAME, db='gsdmarin', user='******', passwd='1234') db = MySQLdb.connect(host='127.0.0.1', port=3306, db='gsdmarin', user='******', passwd='1234') self.response.out.write("I should be running on the cloud right now 2<br>") else: self.response.out.write("I should be running on a normal computer right now 1<br>") #Cant connect to localhost + if we fall in here we need error handling! Unacceptable. db = MySQLdb.connect(host='173.194.82.159', port=3306, db='gsdmarin', user='******', passwd='1234') self.response.out.write("I should be running on a normal computer right now 2<br>") # Alternatively, connect to a Google Cloud SQL instance using: # db = MySQLdb.connect(host='ip-address-of-google-cloud-sql-instance', port=3306, user='******', charset='utf 8') cursor = db.cursor() # !!!! Code that decides the name of the table goes here #TABLE_NAME = "EMPLOYEE" ------> THIS HAS BEEN DONE IN LINE 87 e.g TABLE_NAME = self.request.get('table_name') # ----------------------------------------------------------------------------------------------------- ####### AYHUN!!!!!!!! # print file name and content to console, just for seeing it self.response.out.write('File name:<br>') self.response.out.write(TABLE_NAME+"<br>") self.response.out.write('File content:<br>') self.response.out.write(big_dict) self.response.out.write('<br>') # query the 'information_schema' to see if a table that has the 'TABLE_NAME' as its name exists table_exists = False sql = "SELECT * FROM information_schema.tables WHERE table_name = '%s'" % TABLE_NAME self.response.out.write("query:" + sql + "<br>") self.response.out.write("result:<br>") cursor.execute(sql) for row in cursor.fetchall(): table_exists = True self.response.out.write( ' '.join(str(e) for e in row) + '<br>') if table_exists: self.response.out.write("Table %s already exists." 
% TABLE_NAME) self.response.out.write("<br>**********Database before insertion:") sql = "SELECT * FROM gsdmarin.%s" % TABLE_NAME cursor.execute(sql) for row in cursor.fetchall(): self.response.out.write('<br>' + ' '.join(str(e) for e in row) ) self.response.out.write('<br>************************************') # TODO: insert to existing table here sql = [] sql.append("INSERT INTO `gsdmarin`.`%s`" % TABLE_NAME) sql.append("(`%s`," % big_dict[0][0]) for i in range(1,num_cols-1): sql.append("`%s`," % big_dict[i][0]) sql.append("`%s`)" % big_dict[num_cols - 1][0]) sql.append("VALUES") for r in range(1,len(big_dict[0])): if r == 1: sql.append("('"+big_dict[0][1]+"',") else: sql.append(",('"+big_dict[0][1]+"',") for c in range(1,num_cols-1): sql.append("'"+big_dict[c][r]+"',") sql.append("'"+big_dict[num_cols-1][r]+"')") sql.append(';') self.response.out.write("<br>insertion query:<br>" + ' '.join(sql)) try: cursor.execute(' '.join(sql)) db.commit() except Exception, e: db.rollback self.response.out.write("<br>**********Database after insertion:") sql = "SELECT * FROM gsdmarin.%s" % TABLE_NAME cursor.execute(sql) for row in cursor.fetchall(): self.response.out.write("<br>" + ' '.join(str(e) for e in row) ) self.response.out.write('<br>************************************') else:
def get(self):
    """default landing page"""
    # Builds a list of {query, results} dicts from the AWARE tables and
    # renders them with index.html.  Only works when running on App Engine
    # (Cloud SQL is reached via the /cloudsql unix socket).
    if (os.getenv('SERVER_SOFTWARE') and os.getenv('SERVER_SOFTWARE').startswith('Google App Engine/')):
        db = MySQLdb.connect(unix_socket='/cloudsql/' + _INSTANCE_NAME, db=_DB, user='******')
        cursor = db.cursor()
        logging.info("making queries")
        # some sample queries that will write examples of the sort of
        # data we have collected to the log so you can get a sense of things
        self.make_and_print_query(cursor, 'SHOW TABLES', 'Show the names of all tables')
        self.make_and_print_query(cursor, 'SELECT DISTINCT device_id FROM locations', 'List all device ids')
        self.make_and_print_query(cursor, "SELECT * FROM mqtt_messages WHERE device_id = '{0}' LIMIT 10".format(_ID), 'Example contents of mqtt_messages')
        self.make_and_print_query(cursor, "SELECT * FROM plugin_google_activity_recognition WHERE device_id = '{0}' LIMIT 10 ".format(_ID), 'Example contents of plugin_google_activity_recognition')
        self.make_and_print_query(cursor, "SELECT * FROM locations WHERE device_id = '{0}' LIMIT 10".format(_ID), 'Example contents of locations')
        # this query collects information about the number
        # of log enteries for each day.
        day = "FROM_UNIXTIME(timestamp/1000,'%Y-%m-%d')"
        query = "SELECT {0} as day_with_data, count(*) as records FROM {1} WHERE device_id = '{2}' GROUP by day_with_data".format(day, _LOCATIONS, _ID)
        rows = self.make_query(cursor, query)
        queries = [{"query": query, "results": rows}]
        # this query lets us collect information about
        # locations that are visited so we can bin them.
        query = "SELECT double_latitude, double_longitude FROM {0} WHERE device_id = '{1}'".format(_LOCATIONS, _ID)
        locations = self.make_query(cursor, query)
        #locations = self.make_and_print_query(cursor, query, "locatons")
        # Cluster the raw lat/lon points into bins within _EPSILON of
        # each other.
        bins = self.bin_locations(locations, _EPSILON)
        for location in bins:
            logging.info('%s\n' % str(location))
        queries = queries + [{"query": query, "results": bins}]
        # now get locations organized by day and hour
        time_of_day = "FROM_UNIXTIME(timestamp/1000,'%H')"
        day = "FROM_UNIXTIME(timestamp/1000,'%Y-%m-%d')"
        query = "SELECT {0} as day, {1} as time_of_day, double_latitude, double_longitude FROM {2} WHERE device_id = '{3}' GROUP BY day, time_of_day".format(day, time_of_day, _LOCATIONS, _ID)
        locations = self.make_query(cursor, query)
        # and get physical activity per day and hour
        # activity name and duration in seconds
        # NOTE(review): `timestamp/100` below is almost certainly a typo for
        # `timestamp/1000` (every other query divides by 1000) — confirm.
        day_and_time_of_day = "FROM_UNIXTIME(timestamp/100, '%Y-%m-%d %H')"
        elapsed_seconds = "(max(timestamp)-min(timestamp))/1000"
        query = "SELECT {0} as day, {1} as time_of_day, activity_name, {2} as time_elapsed_seconds FROM {3} WHERE device_id = '{4}' GROUP BY day, activity_name, {5}".format(day, time_of_day, elapsed_seconds, _ACTIVITY, _ID, day_and_time_of_day)
        activities = self.make_query(cursor, query)
        # now we want to associate activities with locations. This will update the
        # bins list with activities.
        self.group_activities_by_location(bins, locations, activities, _EPSILON)
        db.close()
    else:
        queries = [{"query": 'Need to connect from Google Appspot', "results": []}]
    context = {"queries": queries}
    logging.info("context")
    logging.info(context)
    # and render the response
    self.render_response('index.html', context)