def main():
    """Entry point: prompt for credentials, start the bot, then run an
    interactive command loop until the user types "die".

    Fixes: the original used Python 2 ``print x`` statements and
    ``raw_input`` — both syntax/name errors under Python 3, which the
    rest of this file targets (it uses ``print()`` calls elsewhere).
    """
    global bot
    global modules
    init_modules()
    # Re-prompt until the supplied credentials validate.
    while True:
        creds = credentials.get()
        print(creds)
        if credentials.validate(creds):
            break
    bot = init_bot(creds)
    bot.start(on_event)
    print('PRIME HAS STARTED')
    bot.message("I'm alive :)")
    # NOTE(review): with 'no-input' in sys.argv this loop spins forever
    # doing nothing (original behavior, preserved) — confirm intent.
    while True:
        if 'no-input' not in sys.argv:
            text = input(">> ")  # Python 3 replacement for raw_input()
            if text == "die":
                print('PRIME HAS STOPPED RUNNING')
                bot.message("I'm dead :(")
                time.sleep(0.4)  # give the farewell message time to send
                break
            else:
                bot.message(text)
    os._exit(6)
def get_gdrive_file_children(id):
    """Return a list of File objects for all non-trashed children of the
    Drive folder with the given id, paging through the full result set.
    """
    service = build('drive', 'v3', credentials=credentials.get())
    children = []
    parent_query = "'" + id + "'" + " in parents"
    page_token = None
    done = False
    # do-while: always issue the first request, then continue while the
    # API hands back a nextPageToken.
    while not done:
        response = service.files().list(
            pageSize=100,
            fields="nextPageToken, files(id, name, mimeType, size, trashed, modifiedTime)",
            pageToken=page_token,
            q=parent_query).execute()
        page_token = response.get('nextPageToken')
        done = not page_token
        for entry in response.get('files', []):
            if entry["trashed"]:
                continue
            # Drive omits 'size' for folders and some native docs.
            byte_size = int(entry['size']) if 'size' in entry else 0
            modified = datetime.datetime.strptime(
                entry['modifiedTime'], '%Y-%m-%dT%H:%M:%S.%fZ')
            children.append(
                File(id=entry['id'],
                     name=entry['name'],
                     is_folder=entry['mimeType'] == 'application/vnd.google-apps.folder',
                     date_modified=modified,
                     size=byte_size))
    return children
def drop_and_rewrite_tables():
    """Drop and re-create the MySQL tables used by this module.

    Destructive: all existing rows in the re-created tables are lost.

    Fix: the original passed a single "DROP …;CREATE …" string to
    cursor.execute(), which MySQL Connector/Python rejects unless
    multi=True is used; the DROP and CREATE are now issued as separate
    statements.
    """
    creds = credentials.get()
    TABLES = {}
    # Column layout mirrors the NOAA daily-history CSV header:
    # "STATION","NAME","DATE","DAPR","MDPR","PRCP","SNOW","SNWD","TAVG","TMAX","TMIN","TOBS","WESD","WESF","WT01","WT03","WT04","WT05","WT06","WT11"
    TABLES['daily_histories'] = (
        "CREATE TABLE `daily_histories` ("
        # " `date_id` int(11) NOT NULL AUTO_INCREMENT,"
        " `date` varchar(15) NOT NULL,"
        " `year` int,"
        " `month` int,"
        " `day` int,"
        " `zipcode` int NOT NULL,"
        " `station` varchar(15),"
        " `name` varchar(30),"
        " `dapr` varchar(15),"
        " `mdpr` varchar(15),"
        " `prcp` decimal(8,4),"
        " `snow` decimal(8,4),"
        " `snwd` decimal(8,4),"
        " `tavg` decimal(8,4),"
        " `tmin` int,"
        " `tmax` int,"
        " `tobs` varchar(15),"
        " `wesd` varchar(15),"
        " `wesf` varchar(15),"
        " `wt10` varchar(15),"
        " `wt03` varchar(15),"
        " `wt04` varchar(15),"
        " `wt05` varchar(15),"
        " `wt06` varchar(15),"
        " `wt11` varchar(15),"
        " PRIMARY KEY (`date`, `zipcode`)"
        ") ENGINE=InnoDB")
    cnx = mysql.connector.connect(user=creds['user'],
                                  password=creds['password'],
                                  host='127.0.0.1',
                                  database=creds['database'])
    cursor = cnx.cursor()
    for table_name, table_description in TABLES.items():
        try:
            # One statement per execute(): drop the old table, then create.
            cursor.execute("DROP TABLE IF EXISTS `{}`".format(table_name))
            print("Creating table {}: ".format(table_name))
            cursor.execute(table_description)
        except mysql.connector.Error as err:
            if err.errno == errorcode.ER_TABLE_EXISTS_ERROR:
                print("already exists.")
            else:
                print(err.msg)
    cursor.close()
    cnx.close()
def create_gdrive_folder(name, parents=None):
    """Create a Google Drive folder and return it as a File.

    Args:
        name: display name for the new folder.
        parents: optional list of parent-folder ids.

    Fixes: the original never called .execute() on the create request
    (so `file` was an un-sent HttpRequest with no 'id' key), and used
    `true`, which is a NameError in Python.
    """
    creds = credentials.get()
    service = build('drive', 'v3', credentials=creds)
    file_metadata = {
        'name': name,
        'mimeType': 'application/vnd.google-apps.folder'
    }
    if parents:
        file_metadata['parents'] = parents
    # .execute() actually sends the request and returns the response dict.
    file = service.files().create(body=file_metadata, fields="id").execute()
    return File(id=file['id'], name=name, is_folder=True)
def into_raw_ping_data(ping_result):
    """Insert one raw ping measurement row from the given mapping.

    `ping_result` must supply the keys named in the parameterized
    INSERT below (run_date, ping_result_summary_id, icmp_seq, ping_time).
    """
    creds = credentials.get()
    insert_stmt = ("INSERT INTO raw_ping_data "
                   "(run_date, ping_result_summary_id, icmp_seq, ping_time) "
                   "values(%(run_date)s, %(ping_result_summary_id)s, %(icmp_seq)s, %(ping_time)s)")
    connection = mysql.connector.connect(user=creds['user'],
                                         password=creds['password'],
                                         host='127.0.0.1',
                                         database=creds['database'])
    cursor = connection.cursor()
    cursor.execute(insert_stmt, ping_result)
    connection.commit()
    cursor.close()
    connection.close()
def get_gdrive_file(id):
    """Fetch metadata for a single Drive file id and wrap it in a File."""
    service = build('drive', 'v3', credentials=credentials.get())
    # id = "'" + id + "'"
    meta = service.files().get(fileId=id).execute()
    print(meta)
    folder = meta['mimeType'] == 'application/vnd.google-apps.folder'
    # Drive omits 'size' for folders and some native docs.
    byte_size = int(meta['size']) if 'size' in meta else 0
    modified = None
    if 'modifiedTime' in meta:
        modified = datetime.datetime.strptime(meta['modifiedTime'],
                                              '%Y-%m-%dT%H:%M:%S.%fZ')
    return File(id=meta['id'],
                name=meta['name'],
                is_folder=folder,
                date_modified=modified,
                size=byte_size)
def list_files():
    """Print name/id/parents/mimeType for up to 10 items in the Drive root."""
    service = build('drive', 'v3', credentials=credentials.get())
    # Call the Drive v3 API
    response = service.files().list(
        pageSize=10,
        fields="nextPageToken, files(id, name, parents, mimeType)",
        q="'root' in parents").execute()
    entries = response.get('files', [])
    if not entries:
        print('No files found.')
        return
    print('Files:')
    for entry in entries:
        print(u'{0} ({1}) {2} {3}'.format(entry['name'], entry['id'],
                                          entry['parents'],
                                          entry['mimeType']))
def into_ping_result_summary(ping_results):
    """Insert one ping-summary row and return its auto-generated row id."""
    creds = credentials.get()
    summary_insert = ("INSERT INTO ping_result_summary "
                      "(run_date, min, max, avg, mdev, total, loss, traceroute_dump) "
                      "values (%(run_date)s, %(min)s, %(max)s, %(avg)s, %(mdev)s, %(total)s, %(loss)s, %(traceroute_dump)s)")
    connection = mysql.connector.connect(user=creds['user'],
                                         password=creds['password'],
                                         host='127.0.0.1',
                                         database=creds['database'])
    cursor = connection.cursor()
    cursor.execute(summary_insert, ping_results)
    # Capture the id before closing the cursor.
    new_row_id = cursor.lastrowid
    connection.commit()
    cursor.close()
    connection.close()
    return new_row_id
def into_daily_histories(daily_history):
    """Insert one daily-history row; return its lastrowid, or None when a
    required field is missing (no insert is performed in that case).

    Side effect: empty-string values for keys in OVERRIDE_MISSES are
    replaced with 0.0 in the caller's dict.

    Fix: the original opened the MySQL connection *before* validating, so
    the early return on a missing required field leaked the connection.
    Validation now happens first and the connection is opened only when
    an insert will actually occur.
    """
    # Required fields: bail out if any is the empty string.
    for key in BREAKING_MISSES:
        if daily_history[key] == '':
            return None
    # Optional numeric fields: coerce empty string to 0.0.
    for key in OVERRIDE_MISSES:
        if daily_history[key] == '':
            daily_history[key] = 0.0
    creds = credentials.get()
    cnx = mysql.connector.connect(user=creds['user'],
                                  password=creds['password'],
                                  host='127.0.0.1',
                                  database=creds['database'])
    cursor = cnx.cursor()
    prs = (
        "INSERT INTO daily_histories "
        "(zipcode,STATION,NAME,DATE,DAPR,MDPR,PRCP,SNOW,SNWD,TMAX,TMIN,TOBS,WESD,WESF,year,month,day) "
        "values (%(zipcode)s, %(STATION)s, %(NAME)s, %(DATE)s, %(DAPR)s, %(MDPR)s, %(PRCP)s, %(SNOW)s, %(SNWD)s, %(TMAX)s, %(TMIN)s, "
        " %(TOBS)s, %(WESD)s, %(WESF)s, %(year)s, %(month)s, %(day)s)")
    cursor.execute(prs, daily_history)
    lastrowid = cursor.lastrowid
    cnx.commit()
    cursor.close()
    cnx.close()
    return lastrowid
# --- Flask application bootstrap (module-level side effects) ---------------
from flask import Flask, render_template, request, redirect, url_for, make_response
from markupsafe import escape
import pymongo
import datetime
from bson.objectid import ObjectId
import os
import subprocess

# instantiate the app
app = Flask(__name__)

# load credentials and configuration options from .env file
# if you do not yet have a file named .env, make one based on the template in env.example
import credentials
config = credentials.get()

# turn on debugging if in development mode
if config['FLASK_ENV'] == 'development':
    # turn on debugging, if in development
    app.debug = True  # debug mode

# make one persistent connection to the database
# NOTE(review): the username/password here are hard-coded placeholders —
# presumably they should come from the .env-backed `config` instead of
# being committed in source; confirm before deploying.
connection = pymongo.MongoClient("class-mongodb.cims.nyu.edu", 27017,
                                 username="******",
                                 password="******",
                                 authSource="bch305")
db = connection['bch305']  # store a reference to the database

# set up the routes
import sys
from pathlib import Path
from googleapiclient.discovery import build
from googleapiclient.errors import HttpError
from requests.exceptions import HTTPError
import credentials
import utils

# drive = build('drive', 'v3', credentials=credentials.get())

# Upload/download chunk size: 20 MiB.
chunk_size = 1024 * 1024 * 20
DEBUG = True

# Token is captured once at import time — long-running processes may hold
# a stale token; confirm refresh handling with credentials module.
ACCESS_TOKEN = credentials.get().token

# API key is loaded from a sibling file at import time; importing this
# module fails if api_key.txt is absent.
API_KEY = None
with open('api_key.txt') as file:
    API_KEY = file.read()


class File:
    """Lightweight value object describing a Google Drive file or folder."""

    def __init__(self, id, name, is_folder=False, date_modified=None, size=0):
        super().__init__()
        self.id = id                            # Drive file id
        self.name = name                        # display name
        self._date_modified = date_modified     # datetime or None when unknown
        self.is_folder = is_folder              # True for Drive folders
        self.size = size                        # bytes; 0 when Drive omits size