def handle_action_tag(self, ttype, data):
    """Upload files to Google Drive in response to an action tag.

    ``ttype`` is currently unused; ``data`` selects the action:
    - 'DOCS': upload mod/achat.txt and mod/depense.txt to the default folder.
    - 'PICS': upload the picture path queued in mod/filepath; when
      mod/hashtags is non-empty the picture goes into the Drive folder whose
      title matches the queued hashtag instead.
    """
    logging.debug("Open : %s", data)
    # Default destination folder on Drive (previously duplicated three times).
    folder_id = '0B8mDDuHeuNHDfmM0OXlWTndpdkczNHBBY3VJaXJ2ZlNqVVBoWWk3UDZnc0NvMS1Gd1JtWU0'
    gauth = GoogleAuth()
    drive = GoogleDrive(gauth)
    gauth.LoadCredentialsFile("mycreds.txt")
    # Non-empty mod/hashtags means the picture should go into a hashtag folder.
    check = os.stat("mod/hashtags").st_size
    # Standard pydrive flow: authenticate once, then refresh/reuse credentials
    # (was if/if/else, which re-authorized right after a fresh login).
    if gauth.credentials is None:
        gauth.LocalWebserverAuth()
    elif gauth.access_token_expired:
        gauth.Refresh()
    else:
        gauth.Authorize()
    gauth.SaveCredentialsFile("mycreds.txt")
    if data == 'DOCS':
        # Upload the purchase (achat) and expense (depense) files.
        for local_path in ('mod/achat.txt', 'mod/depense.txt'):
            gfile = drive.CreateFile({'parents': [{"id": folder_id}]})
            gfile.SetContentFile(local_path)
            gfile.Upload()
        logging.debug("Upload done.")
    if data == 'PICS':
        # First line of mod/filepath is the picture to upload.
        with open('mod/filepath', 'r') as fp:  # handle was left open before
            pic = fp.readlines()[0]
        if check == 0:
            # No hashtag queued: upload straight into the default folder.
            # (An unused listdir() scan of /home/pi/images/ was removed here.)
            gfile = drive.CreateFile({'parents': [{"id": folder_id}]})
            gfile.SetContentFile(pic)
            gfile.Upload()
            logging.debug("Upload done.")
            open('mod/filepath', 'w').close()  # truncate the queue file
        else:
            with open('mod/hashtags', 'r') as fp:
                # Drop the leading '#' and the trailing newline.
                hashtag = fp.readlines()[0][1:-1]
            file_list = drive.ListFile(
                {'q': "'root' in parents and trashed=false"}).GetList()
            for entry in file_list:
                # Fix: these were Python 2 print statements (SyntaxError on
                # Python 3, which the rest of this file targets).
                print('title: %s, id: %s' % (entry['title'], entry['id']))
                print(hashtag)
                print(entry['title'])
                if entry['title'] == hashtag:
                    gfile = drive.CreateFile({'parents': [{"id": entry['id']}]})
                    gfile.SetContentFile(pic)
                    gfile.Upload()
                    logging.debug("Upload done.")
            open('mod/filepath', 'w').close()
            open('mod/hashtags', 'w').close()
class GDriveServerNode(object):
    """ROS node exposing Google Drive upload services (~upload, ~upload_multi)."""

    folder_mime_type = 'application/vnd.google-apps.folder'
    folder_url_format = 'https://drive.google.com/drive/folders/{}'
    file_url_format = 'https://drive.google.com/uc?id={}'

    def __init__(self):
        settings_yaml = rospy.get_param('~settings_yaml', None)
        # Sharing permission applied to every uploaded file.
        self.share_type = rospy.get_param('~share_type', 'anyone')
        self.share_value = rospy.get_param('~share_value', 'anyone')
        self.share_role = rospy.get_param('~share_role', 'reader')
        self.share_with_link = rospy.get_param('~share_with_link', True)
        if settings_yaml is not None:
            self.gauth = GoogleAuth(settings_yaml)
        else:
            rospy.logerr('param: ~settings_yaml is not correctly set.')
            sys.exit(1)
        rospy.loginfo('Google drive authentication starts.')
        self.gauth.LocalWebserverAuth()
        self.gdrive = GoogleDrive(self.gauth)
        rospy.loginfo('Google drive authentication finished.')
        self.upload_server = rospy.Service('~upload', Upload, self._upload_cb)
        self.upload_multi_server = rospy.Service('~upload_multi',
                                                 MultipleUpload,
                                                 self._upload_multi_cb)

    def _upload_cb(self, req):
        """Service callback: upload one file, creating parent folders as needed."""
        timestamp = '{0:%Y%m%d%H%M%S}'.format(datetime.datetime.now())
        parents_path = req.parents_path
        parents_id = req.parents_id
        # response initialization
        res = UploadResponse()
        res.success = False
        res.file_id = ''
        res.file_url = ''
        if parents_id and parents_path:
            rospy.logerr('parents_path and parents_id is both set.')
            # Fix: the '{}' placeholder previously had no argument.
            rospy.logerr(
                'parents_id: {} is selected to upload.'.format(parents_id))
            parents_path = ''
        if parents_path:
            try:
                parents_id = self._get_parents_id(parents_path, mkdir=True)
            except (ValueError, ApiRequestError) as e:
                rospy.logerr(e)
                rospy.logerr(
                    'Failed to get parents_id: {}'.format(parents_path))
                return res
        # root
        elif parents_id == '' and parents_path == '':
            parents_id = ''
        if req.use_timestamp_folder:
            try:
                parents_id = self._get_parents_id([timestamp],
                                                  parents_id=parents_id,
                                                  mkdir=True)
            except (ValueError, ApiRequestError) as e:
                rospy.logerr(e)
                rospy.logerr('Failed to get parents_id: {} in {}'.format(
                    timestamp, self.folder_url_format.format(parents_id)))
                return res
        success, file_id, file_url = self._upload_step(
            req.file_path, req.file_title, parents_id,
            req.use_timestamp_file_title, timestamp)
        res.success = success
        res.file_id = file_id
        res.file_url = file_url
        res.parents_id = parents_id
        res.parents_url = self.folder_url_format.format(parents_id)
        return res

    def _upload_multi_cb(self, req):
        """Service callback: upload several files into one resolved folder."""
        timestamp = '{0:%Y%m%d%H%M%S}'.format(datetime.datetime.now())
        parents_path = req.parents_path
        parents_id = req.parents_id
        # response initialization
        res = MultipleUploadResponse()
        res.successes = [False] * len(req.file_titles)
        res.file_ids = [''] * len(req.file_titles)
        res.file_urls = [''] * len(req.file_titles)
        if parents_id and parents_path:
            rospy.logerr('parents_path and parents_id is both set.')
            # Fix: the '{}' placeholder previously had no argument.
            rospy.logerr(
                'parents_id: {} is selected to upload.'.format(parents_id))
            parents_path = ''
        if parents_path:
            try:
                parents_id = self._get_parents_id(parents_path, mkdir=True)
            except (ValueError, ApiRequestError) as e:
                rospy.logerr(e)
                rospy.logerr(
                    'Failed to get parents_id: {}'.format(parents_path))
                return res
        # root
        elif parents_id == '' and parents_path == '':
            parents_id = ''
        if req.use_timestamp_folder:
            try:
                parents_id = self._get_parents_id([timestamp],
                                                  parents_id=parents_id,
                                                  mkdir=True)
            except (ValueError, ApiRequestError) as e:
                rospy.logerr(e)
                rospy.logerr('Failed to get parents_id: {} in {}'.format(
                    timestamp, self.folder_url_format.format(parents_id)))
                return res
        for i, (file_path, file_title) in enumerate(
                zip(req.file_paths, req.file_titles)):
            success, file_id, file_url = self._upload_step(
                file_path, file_title, parents_id,
                req.use_timestamp_file_title, timestamp)
            res.successes[i] = success
            res.file_ids[i] = file_id
            res.file_urls[i] = file_url
        res.parents_id = parents_id
        res.parents_url = self.folder_url_format.format(parents_id)
        return res

    def _upload_step(self, file_path, file_title, parents_id,
                     use_timestamp_file_title=False, timestamp=None):
        """Upload a single file; returns (success, file_id, file_url)."""
        # Default the Drive title to the local basename.
        file_title = file_title if file_title else file_path.split('/')[-1]
        file_path = os.path.expanduser(file_path)
        if use_timestamp_file_title:
            file_title = '{}_{}'.format(timestamp, file_title)
        success = False
        file_id = ''
        file_url = ''
        try:
            file_id = self._upload_file(file_path, file_title,
                                        parents_id=parents_id)
            file_url = self.file_url_format.format(file_id)
            success = True
        except ApiRequestError as e:
            rospy.logerr(e)
            # Fix: '{}' placeholders were never formatted (extra args were
            # passed to logerr instead of str.format).
            rospy.logerr('Failed to upload: {} -> {}'.format(
                file_path, self.folder_url_format.format(parents_id)))
        return success, file_id, file_url

    def _upload_file(self, file_path, file_title, parents_id=None):
        """Upload one file, apply the configured sharing permission, return its id."""
        rospy.loginfo('Start uploading a file: {}'.format(file_title))
        if parents_id:
            gfile = self.gdrive.CreateFile({'parents': [{'id': parents_id}]})
        else:
            gfile = self.gdrive.CreateFile()
        gfile.SetContentFile(file_path)
        gfile['title'] = file_title
        gfile.Upload()
        gfile.InsertPermission({
            'type': self.share_type,
            'value': self.share_value,
            'role': self.share_role,
            'withLink': self.share_with_link,
        })
        rospy.loginfo('Finish uploading a file: {}'.format(file_title))
        return gfile['id']

    def _upload_folder(self, folder_title, parents_id=None):
        """Create a Drive folder (optionally inside parents_id) and return its id."""
        rospy.loginfo('Start making a folder: {}'.format(folder_title))
        if parents_id:
            gfolder = self.gdrive.CreateFile({
                'title': folder_title,
                'parents': [{
                    'id': parents_id
                }],
                # Consistency: use the class constant instead of a literal.
                'mimeType': self.folder_mime_type
            })
        else:
            gfolder = self.gdrive.CreateFile({
                'title': folder_title,
                'mimeType': self.folder_mime_type
            })
        gfolder.Upload()
        rospy.loginfo('Finish making a folder: {}'.format(folder_title))
        return gfolder['id']

    def _get_parents_id(self, parents_path, parents_id=None, mkdir=False):
        """Resolve (and optionally create) a nested folder path; return its id.

        parents_path may be a '/'-separated string or a list of folder titles.
        Raises ValueError when a folder is missing and mkdir is False.
        """
        if parents_path == '':
            return None
        if not isinstance(parents_path, list):
            parents_path = [p for p in parents_path.split('/') if p != '']
        folder_title = parents_path[0]
        parent = parents_id if parents_id else 'root'
        gfiles = self.gdrive.ListFile(
            {'q': "'{}' in parents and trashed=false".format(parent)})
        gfiles = gfiles.GetList()
        gfolders = []
        for gf in gfiles:
            if (gf['mimeType'] == self.folder_mime_type
                    and gf['title'] == folder_title):
                gfolders.append(gf)
        if len(parents_path) == 1:
            if len(gfolders) > 0:
                return gfolders[0]['id']
            if mkdir:
                folder_id = self._upload_folder(folder_title,
                                                parents_id=parents_id)
                return folder_id
            else:
                raise ValueError(
                    'Folder is not found: {}'.format(folder_title))
        else:
            if len(gfolders) > 0 or mkdir:
                if len(gfolders) > 0:
                    next_parents_id = gfolders[0]['id']
                elif mkdir:
                    next_parents_id = self._upload_folder(
                        folder_title, parents_id=parents_id)
                folder_id = self._get_parents_id(parents_path[1:],
                                                 parents_id=next_parents_id,
                                                 mkdir=mkdir)
                return folder_id
            else:
                # Fix: folder_title was passed as a second exception argument
                # instead of being formatted into the message.
                raise ValueError(
                    'Folder is not found: {}'.format(folder_title))
import datetime
import pandas as pd
import forecastio
import time
import os
from pydrive.auth import GoogleAuth
from pydrive.drive import GoogleDrive
import zipfile
import math

# Authenticate against Google Drive once at import time.
gauth = GoogleAuth()
gauth.LocalWebserverAuth(
)  # Creates local webserver and auto handles authentication.
drive = GoogleDrive(gauth)


def daily_weather(api_key, d, lat=38.9072, lng=-77.0369):
    '''Pull historical daily weather from Dark Sky API.

    api_key: Dark Sky API key.
    d: datetime of the day to fetch (also used as the DataFrame index).
    lat, lng: coordinates; default to Washington, DC (generalized from the
    previously hard-coded constants — passing nothing keeps old behavior).

    Returns a single-row pandas DataFrame of that day's weather data.
    '''
    # Pull Daily forecast from Dark Sky API
    forecast = forecastio.load_forecast(api_key, lat, lng, time=d)
    daily = forecast.daily()
    # .d appears to be the raw dict backing the forecastio data point
    # — TODO confirm against the forecastio version in use.
    daily_data = daily.data[0].d
    daily_data_df = pd.DataFrame(daily_data, index=[d])
    # Fix: the original built the DataFrame but never returned it.
    return daily_data_df
# --- Time-lapse capture configuration ---
RUN_TITLE = "run01b"  # label for this capture run
gauth_file = 'app.auth'  # cached pydrive credentials file
# Drive folder id is the first line of folderid.key.
# NOTE(review): the file handle is never closed here.
folderid = open('folderid.key').readline().rstrip()
outdir = 'snapshots'  # local directory for captured frames
interval = 30  # seconds between captures
upload_to_drive = True  # set False to keep snapshots local only
CAMERA_INDEX = 1  # leave 0 unless you have more than one webcam
cutoff_time = datetime.now() + timedelta(hours=12)  # 12 hours after start

# GOOGLE DRIVE: authenticate, reusing/refreshing cached credentials when possible.
g_login = GoogleAuth()
g_login.LoadCredentialsFile(gauth_file)
if g_login.credentials is None:
    g_login.LocalWebserverAuth()  # first run: interactive browser auth
elif g_login.access_token_expired:
    g_login.Refresh()
else:
    g_login.Authorize()
g_login.SaveCredentialsFile(gauth_file)
drive = GoogleDrive(g_login)

# WEBCAM
vc = cv2.VideoCapture(CAMERA_INDEX)
vc.set(cv2.CAP_PROP_BUFFERSIZE, 1)  # extra frame reads for buffer clearing lol
if vc.isOpened():  # try to get the first frame
    rval, frame = vc.read()
def main():
    """Segment local images with an ADE20K model, upload the color masks to the
    'semanticsegmentation' Drive folder, and record per-class pixel ratios to
    CSV and SQLite.

    Relies on module-level names defined elsewhere in this file: model,
    test_transform, ctx, dbms, get_color_pallete, timer, mx, image.
    """
    start = timer()
    print('Processing Start time: %.1f' % (start))
    print("current time", datetime.now())
    gauth = GoogleAuth()
    gauth.LocalWebserverAuth()
    drive = GoogleDrive(gauth)
    # Auto-iterate through all files that matches this query
    file_list = drive.ListFile({'q': "'root' in parents"}).GetList()
    # Fix: file_id was initialized inside the loop, raising NameError after
    # the loop whenever file_list was empty.
    file_id = None
    for entry in file_list:  # renamed from 'file' (shadowed the builtin)
        if entry['title'] == "semanticsegmentation":
            print('Folder Found')
            file_id = entry['id']
            break
    if file_id is not None:
        # ADE20K class names, indexed by predicted label id.
        classes = ["wall","building;edifice","sky","floor;flooring","tree","ceiling","road;route","bed","windowpane;window","grass","cabinet","sidewalk;pavement","person;individual;someone;somebody;mortal;soul","earth;ground","door;double;door","table","mountain;mount","plant;flora;plant;life","curtain;drape;drapery;mantle;pall","chair","car;auto;automobile;machine;motorcar","water","painting;picture","sofa;couch;lounge","shelf","house","sea","mirror","rug;carpet;carpeting","field","armchair","seat","fence;fencing","desk","rock;stone","wardrobe;closet;press","lamp","bathtub;bathing;tub;bath;tub","railing;rail","cushion","base;pedestal;stand","box","column;pillar","signboard;sign","chest;of;drawers;chest;bureau;dresser","counter","sand","sink","skyscraper","fireplace;hearth;open;fireplace","refrigerator;icebox","grandstand;covered;stand","path","stairs;steps","runway","case;display;case;showcase;vitrine","pool;table;billiard;table;snooker;table","pillow","screen;door;screen","stairway;staircase","river","bridge;span","bookcase","blind;screen","coffee;table;cocktail;table","toilet;can;commode;crapper;pot;potty;stool;throne","flower","book","hill","bench","countertop","stove;kitchen;stove;range;kitchen;range;cooking;stove","palm;palm;tree","kitchen;island","computer;computing;machine;computing;device;data;processor;electronic;computer;information;processing;system","swivel;chair","boat","bar","arcade;machine","hovel;hut;hutch;shack;shanty","bus;autobus;coach;charabanc;double-decker;jitney;motorbus;motorcoach;omnibus;passenger;vehicle","towel","light;light;source","truck;motortruck","tower","chandelier;pendant;pendent","awning;sunshade;sunblind","streetlight;street;lamp","booth;cubicle;stall;kiosk","television;television;receiver;television;set;tv;tv;set;idiot;box;boob;tube;telly;goggle;box","airplane;aeroplane;plane","dirt;track","apparel;wearing;apparel;dress;clothes","pole","land;ground;soil","bannister;banister;balustrade;balusters;handrail","escalator;moving;staircase;moving;stairway","ottoman;pouf;pouffe;puff;hassock","bottle","buffet;counter;sideboard","poster;posting;placard;notice;bill;card","stage","van","ship","fountain","conveyer;belt;conveyor;belt;conveyer;conveyor;transporter","canopy","washer;automatic;washer;washing;machine","plaything;toy","swimming;pool;swimming;bath;natatorium","stool","barrel;cask","basket;handbasket","waterfall;falls","tent;collapsible;shelter","bag","minibike;motorbike","cradle","oven","ball","food;solid;food","step;stair","tank;storage;tank","trade;name;brand;name;brand;marque","microwave;microwave;oven","pot;flowerpot","animal;animate;being;beast;brute;creature;fauna","bicycle;bike;wheel;cycle","lake","dishwasher;dish;washer;dishwashing;machine","screen;silver;screen;projection;screen","blanket;cover","sculpture","hood;exhaust;hood","sconce","vase","traffic;light;traffic;signal;stoplight","tray","ashcan;trash;can;garbage;can;wastebin;ash;bin;ash-bin;ashbin;dustbin;trash;barrel;trash;bin","fan","pier;wharf;wharfage;dock","crt;screen","plate","monitor;monitoring;device","bulletin;board;notice;board","shower","radiator","glass;drinking;glass","clock","flag"]
        files = glob.glob(r'/Users/divyachandana/Documents/NJIT/work/summertasks/jun1-jun5/atlanta/*.jpg')
        print("Total Files", len(files))
        columns = ['filename', 'class', 'total_pixel', 'individual_pixel',
                   'ratio', 'timestamp']
        with open('semantic_results_atlanta.csv', 'a') as csvfile:
            csvwriter = csv.writer(csvfile, lineterminator='\n')
            for f in files:
                # Skip files already recorded in the results table (resume support).
                file_check_query = "select count(*) from {} where filename like '%{}%'".format(
                    'semantic_results_atlanta', os.path.basename(f))
                count = dbms.get_count_result(file_check_query)
                if count > 0:
                    continue
                try:
                    img = image.imread(f)
                    img = image.resize_short(img, 1024)
                    # ctx is expected to be defined at module scope — confirm.
                    img = test_transform(img, ctx)
                    output = model.predict(img)
                    predict = mx.nd.squeeze(mx.nd.argmax(output, 1)).asnumpy()
                    # Render the predicted label map as an ADE20K color mask.
                    mask = get_color_pallete(predict, 'ade20k')
                    predict = predict.astype(numpy.uint8)
                    convert_single_array = numpy.array(predict)
                    unique_numbers = numpy.unique(convert_single_array)
                    new_basename = os.path.basename(f).replace(".jpg", ".png")
                    new_name = os.path.join('output/', new_basename)
                    mask.save(new_name)
                    total_pixel = numpy.sum(predict)
                    # Upload the mask into the Drive folder found above.
                    d_file = drive.CreateFile({
                        'parents': [{'id': file_id}],
                        'title': os.path.basename(new_name)})
                    d_file.SetContentFile(new_name)
                    d_file.Upload()
                    combile_all_csv_data = []
                    combine_sql_srting_format = []
                    for i in unique_numbers:
                        individual_count = numpy.sum(predict == i)
                        csv_data = []
                        csv_data.append(os.path.basename(f))
                        csv_data.append(classes[i])
                        csv_data.append(total_pixel)
                        csv_data.append(individual_count)
                        csv_data.append(round((individual_count / total_pixel), 6))
                        time_stamp = datetime.now()
                        csv_data.append(time_stamp)
                        combile_all_csv_data.append(csv_data)
                        # Quote values for the SQL insert ('is None' was '== None').
                        sql_srting = ["NULL" if val is None else "'" + str(val) + "'"
                                      for val in csv_data]
                        sql_srting_format = ",".join([str(val) for val in sql_srting])
                        combine_sql_srting_format.append(sql_srting_format)
                    csvwriter.writerows(combile_all_csv_data)
                    dbms.insertmany_sqlite3('semantic_results_atlanta',
                                            ','.join(columns),
                                            combine_sql_srting_format)
                    # Remove the local mask once it has been uploaded/recorded.
                    os.remove(new_name)
                except Exception as e:
                    # Best-effort batch: log and move on to the next image.
                    print("Error in :", ' ' + f, e)
                    continue
    print('Finished')
    end = timer()
    print('Processing time: %.1f' % (end - start))
def getCredentials():
    """Run the local-webserver OAuth flow and return an authenticated GoogleDrive client."""
    authenticator = GoogleAuth()
    authenticator.LocalWebserverAuth()
    return GoogleDrive(authenticator)
# Record the machine's current public IP to a file stored on Google Drive.
path = r'/home/wolf/Documents/Google_Drive'
file = path + os.sep + 'ip.txt'
time_stamp = datetime.datetime.now().strftime("%Y%m%d %H:%M:%S")
ip = requests.get('https://api.ipify.org').text
try:
    # Validate that the service returned a well-formed IPv4 address.
    socket.inet_aton(ip)
except socket.error:
    ip = "0.0.0.0"  # sentinel when the lookup result is not a valid address
data = "[" + time_stamp + "] Public IP: " + ip
auth = GoogleAuth()
auth.LoadCredentialsFile(path + os.sep + "Credentials.token")
# Standard pydrive flow: authenticate once, then refresh/reuse credentials.
if auth.credentials is None:  # fix: was 'auth.credentials == None'
    auth.LocalWebserverAuth()
elif auth.access_token_expired:
    auth.Refresh()
else:
    auth.Authorize()
auth.SaveCredentialsFile(path + os.sep + "Credentials.token")
drive = GoogleDrive(auth)
folder_id = '18Jmqqe6PU0RWjeUiXcFbRgfErqcEE7Ad'  # target Drive folder
file_list = drive.ListFile({
    'q': "'" + folder_id + "' in parents and trashed=false"
}).GetList()
if len(file_list) != 0:
    print("Updating IP File...")
    file = file_list[0]
    text = file.GetContentString()
def upload_files(images, csv_name, target_drive_dir='slideInfo_BioBasic'):
    """Upload a slide template, a CSV, and a set of animal images to Google Drive.

    images: iterable of image base names (without extension) under animal_images/.
    csv_name: file name of the CSV located in basest_dir.
    target_drive_dir: title of the Drive folder to upload into.

    NOTE(review): relies on module-level 'logger' and 'basest_dir' defined
    elsewhere in this file. If target_drive_dir is not found in the Drive
    root, target_folder_id stays '' and uploads go to an invalid parent.
    """
    logger.log('begin file upload')
    client_secrets_path = basest_dir + "/client_secrets.json"
    credentials_path = basest_dir + "/credentials.txt"
    GoogleAuth.DEFAULT_SETTINGS['client_config_file'] = client_secrets_path
    # Create google account authentication objects
    gauth = GoogleAuth()
    logger.log('Looking for credentials')
    if os.path.exists(credentials_path):
        logger.log('found a credentials')
        gauth.LoadCredentialsFile(credentials_path)
    # Standard pydrive flow: authenticate, refresh, or reuse cached credentials.
    if gauth.credentials is None:
        logger.log('local connect to website')
        gauth.LocalWebserverAuth()
    elif gauth.access_token_expired:
        logger.log('refresh branch')
        gauth.Refresh()
    else:
        logger.log('authorize branch')
        gauth.Authorize()
    logger.log('creating connection to google drive')
    gauth.SaveCredentialsFile(credentials_path)
    drive = GoogleDrive(gauth)
    logger.log('connection established')
    # Upload the template files to the user
    if target_drive_dir == 'slideInfo_BioBasic':
        upload_template = drive.CreateFile({'title': 'TEMPLATE_bio_bas'})
        upload_template.SetContentFile(basest_dir + '/TEMPLATE_bio_bas.pptx')
        upload_template.Upload()
    elif target_drive_dir == 'slideInfo_BioAdv':
        upload_template = drive.CreateFile({'title': 'TEMPLATE_bio_adv'})
        upload_template.SetContentFile(basest_dir + '/TEMPLATE_bio_adv.pptx')
        upload_template.Upload()
    ''' Find the name of the folder we want to upload to '''
    # Define the folder we want to upload to
    target_folder_name = target_drive_dir
    target_folder_id = ''
    # Find the list of all of the files in the google drive
    file_list = drive.ListFile({
        'q': "'root' in parents and trashed=false"
    }).GetList()
    # Loop through all of the files in the root, looking for the target folder.
    for file_object in file_list:
        # Check if the current one is our target
        if file_object['title'] == target_folder_name:
            # Save the folder id
            target_folder_id = file_object['id']
    logger.log("folder id: " + target_folder_id)
    # upload the CSV containing only the info on the chosen animals for images
    upload_csv = drive.CreateFile({
        'title': csv_name,
        'parents': [{
            'id': target_folder_id
        }]
    })
    upload_csv.SetContentFile(basest_dir + "/" + csv_name)
    upload_csv.Upload()
    logger.log("uploaded chosen_mammals csv")
    # Loop through the images
    for image_name in images:
        upload_image = drive.CreateFile({
            'title': image_name,
            'parents': [{
                'id': target_folder_id
            }]
        })
        #upload_image.SetContentFile( "python_scripts/biodiversity/animal_images/" + image_name )
        logger.log(image_name)
        # NOTE(review): picks a relative path when this module is run as a
        # script and a basest_dir-anchored path when imported — confirm that
        # this working-directory assumption still holds.
        if __name__ == "__main__":
            upload_image.SetContentFile("animal_images/" + image_name + ".jpg")
        else:
            upload_image.SetContentFile(basest_dir + '/animal_images/' +
                                        image_name + ".jpg")
        upload_image.Upload()
def send_csv_to_drive(fileName, fileAlias, target_dir="slideInfo_BioBasic"):
    """Upload a local CSV to a named Google Drive folder, creating the folder
    if it does not exist.

    fileName: local CSV path without the '.csv' extension.
    fileAlias: title (without extension) to give the file on Drive.
    target_dir: title of the destination Drive folder.

    NOTE(review): relies on module-level 'logger' and 'BASE_DIR' defined
    elsewhere in this file.
    """
    logger.log('begin file upload')
    client_secrets_path = BASE_DIR + "/client_secrets.json"
    credentials_path = BASE_DIR + "/credentials.txt"
    GoogleAuth.DEFAULT_SETTINGS['client_config_file'] = client_secrets_path
    # Create google account authentication objects
    gauth = GoogleAuth()
    logger.log('Looking for credentials')
    if os.path.exists(credentials_path):
        logger.log('found a credentials')
        gauth.LoadCredentialsFile(credentials_path)
    # Standard pydrive flow: authenticate, refresh, or reuse cached credentials.
    if gauth.credentials is None:  # or gauth.access_token_expired:
        logger.log('local connect to website')
        gauth.LocalWebserverAuth()
        gauth.SaveCredentialsFile(credentials_path)
    elif gauth.access_token_expired:
        logger.log('refresh branch')
        gauth.Refresh()
    else:
        logger.log('authorize branch')
        gauth.Authorize()
    logger.log('creating connection to google drive')
    gauth.SaveCredentialsFile(credentials_path)
    drive = GoogleDrive(gauth)
    logger.log('connection established')
    ''' Find the name of the folder we want to upload to '''
    # Define the folder we want to upload to
    target_folder_name = target_dir
    target_folder_id = ''
    logger.log('finding drive folder: ' + target_dir)
    folder_not_found = True
    # Keep listing the Drive root until the folder exists: if it is missing we
    # create it, and the next pass of the loop picks up its id.
    while (folder_not_found):
        # Find the list of all of the files in the google drive
        file_list = drive.ListFile({
            'q': "'root' in parents and trashed=false"
        }).GetList()
        # Loop through all of the files, looking for the target folder.
        for file_object in file_list:
            # Check if the current one is our target
            if file_object['title'] == target_folder_name:
                # Save the folder id
                target_folder_id = file_object['id']
                # Exit the while loop
                folder_not_found = False
        # Check if the folder was found
        if target_folder_id == '':
            logger.log('folder not found. Creating one')
            # Create the folder we want
            folder = drive.CreateFile({
                'title': target_folder_name,
                'mimeType': 'application/vnd.google-apps.folder'
            })
            # Upload the folder to the drive
            folder.Upload()
            # The loop will go again, but now it will find the folder
    logger.log("folder found. id: " + target_folder_id)
    # Upload the CSV into the resolved folder.
    upload_csv = drive.CreateFile({
        'title': fileAlias + '.csv',
        'parents': [{
            'id': target_folder_id
        }]
    })
    upload_csv.SetContentFile(fileName + '.csv')
    upload_csv.Upload()
    logger.log('file uploaded')
def auth():
    """Authenticate via the local-webserver OAuth flow; return a GoogleDrive handle."""
    google_auth = GoogleAuth()
    google_auth.LocalWebserverAuth()
    drive_client = GoogleDrive(google_auth)
    return drive_client
class GDrive(object):
    """Team-drive backed storage backend built on pydrive.

    Expects a settings mapping containing 'team_drive' (the team drive id)
    and the id of a root folder to operate in. FOLDER_MIME is a module-level
    constant defined elsewhere in this file.
    """

    def __init__(self, settings, folder):
        self.settings = settings
        self.folder_id = folder
        self.auth = None
        self.drive = None
        self.team_drive = self.settings['team_drive']

    def get_name(self):
        """Human-readable backend name."""
        return 'Google Drive'

    def connect(self, filename='credentials.json'):
        """Authenticate against Drive and cache the root folder listing.

        Interactive auth is only attempted on Windows; elsewhere expired
        credentials raise.
        """
        self.auth = GoogleAuth()
        self.auth.LoadCredentialsFile(filename)
        if self.auth.credentials is None:
            if sys.platform == 'win32':
                self.auth.LocalWebserverAuth()
            else:
                raise Exception('Google Drive credentials have expired.')
        elif self.auth.access_token_expired:
            self.auth.Refresh()
        else:
            self.auth.Authorize()
        self.auth.SaveCredentialsFile(filename)
        self.drive = GoogleDrive(self.auth)
        # Fix: list_folders_in is a method of THIS class, not of pydrive's
        # GoogleDrive — the original 'self.drive.list_folders_in(...)' would
        # raise AttributeError.
        self.root_folders = self.list_folders_in(self.folder_id)

    def create_folder(self, name, folder_id):
        """Create a folder named 'name' inside folder_id on the team drive."""
        folder = self.drive.CreateFile({
            'title': name,
            'parents': [{
                'kind': 'drive#fileLink',
                'teamDriveId': self.team_drive,
                'id': folder_id
            }],
            'mimeType': FOLDER_MIME
        })
        folder.Upload(param={'supportsTeamDrives': True})
        return folder

    def create_folder_in_root(self, name):
        """Create a folder in the configured root and track it locally."""
        folder = self.create_folder(name, self.folder_id)
        self.root_folders.append(folder)
        return folder

    def upload_file(self, source_filename, folder_path, filename):
        """Upload a local file into the given Drive folder id."""
        file = self.drive.CreateFile({
            'title': filename,
            'parents': [{
                'kind': 'drive#fileLink',
                'teamDriveId': self.team_drive,
                'id': folder_path
            }]
        })
        file.SetContentFile(source_filename)
        file.Upload(param={'supportsTeamDrives': True})
        return file

    def search_files(self, query):
        """Run a Drive query scoped to the team drive; returns a list."""
        return self.drive.ListFile({
            'q': query,
            'corpora': 'teamDrive',
            'teamDriveId': self.team_drive,
            'includeTeamDriveItems': 'true',
            'supportsTeamDrives': 'true',
            'maxResults': 20000
        }).GetList()

    def list_folders_in(self, folder_id):
        """List non-trashed subfolders of folder_id."""
        return self.search_files(
            "'{0}' in parents and trashed=false and mimeType='{1}'".format(
                folder_id, FOLDER_MIME))

    def list_files_in(self, folder_id):
        """List all non-trashed items inside folder_id."""
        return self.search_files(
            "'{0}' in parents and trashed=false".format(folder_id))

    def search_for_file_in(self, folder_id, filename):
        """Find files titled 'filename' inside folder_id.

        Fix: the original ignored folder_id entirely and searched the whole
        team drive by title.
        """
        return self.search_files(
            "'{0}' in parents and trashed=false and title='{1}'".format(
                folder_id, filename))

    def find_file_in_list(self, files, filename):
        """Return the first entry in 'files' whose title matches, else None."""
        for file in files:
            if file['title'] == filename:
                return file

    def find_file_in_root(self, filename):
        """Look the name up in the cached root folder listing."""
        return self.find_file_in_list(self.root_folders, filename)

    def get_file_size(self, file):
        """Size in bytes of the first entry of a Drive file listing."""
        return int(file[0]['fileSize'])

    def get_folder_path(self, folder):
        """The 'path' of a Drive folder is simply its id."""
        return folder['id']
def googledrive_login():
    """Perform the interactive local-webserver login and return a GoogleDrive client."""
    credentials = GoogleAuth()
    credentials.LocalWebserverAuth()
    return GoogleDrive(credentials)
def auth_drive():
    """Authenticate through the local-webserver flow and hand back a GoogleDrive client."""
    authenticator = GoogleAuth()
    authenticator.LocalWebserverAuth()
    return GoogleDrive(authenticator)
def login():
    """Authenticate and store the GoogleAuth/GoogleDrive handles in module globals."""
    global gauth, drive
    auth_obj = GoogleAuth()
    auth_obj.LocalWebserverAuth()  # spins up a local webserver for the OAuth dance
    drive_obj = GoogleDrive(auth_obj)  # drive client bound to the fresh credentials
    gauth, drive = auth_obj, drive_obj
def publish_articles(last_attempt_timestamp):
    """Publish articles scheduled in a Google Sheet to social networks.

    Reads the schedule sheet (spreadsheet id and range from environment
    variables), publishes every row that is due today between
    last_attempt_timestamp and now, marks it as published, and writes the
    updated rows back to the sheet.

    NOTE(review): relies on helpers defined elsewhere in this file:
    get_credentials, get_article, get_image, get_id, post.
    """
    spreadsheet_id = os.getenv("SPREADSHEET_ID")
    range_name = os.getenv('RANGE_NAME')
    value_render_option = 'FORMULA'
    value_input_option = 'USER_ENTERED'
    # Russian weekday names as used in the sheet, mapped to datetime.weekday().
    weekdays = {
        'понедельник': 0,
        'вторник': 1,
        'среда': 2,
        'четверг': 3,
        'пятница': 4,
        'суббота': 5,
        'воскресенье': 6,
    }
    creds = get_credentials()
    service = build('sheets', 'v4', credentials=creds)
    sheet = service.spreadsheets()
    result = sheet.values().get(spreadsheetId=spreadsheet_id,
                                range=range_name,
                                valueRenderOption=value_render_option
                                ).execute()
    values = result.get('values', [])
    gauth = GoogleAuth()
    gauth.LocalWebserverAuth()
    drive = GoogleDrive(gauth)
    # Temp dir holds the article text/image files downloaded from Drive.
    with tempfile.TemporaryDirectory() as tmpdirname:
        for row in values:
            # Column 7 holds the published flag ('ДА' = yes): skip done rows.
            if str(row[7]).upper() == 'ДА':
                continue
            # Column 3 is the scheduled weekday name; only publish today's rows.
            post_weekday = weekdays.get(row[3], None)
            today_weekday = datetime.date.today().weekday()
            if post_weekday is None or post_weekday != today_weekday:
                continue
            # Column 4 is the scheduled hour; must be a whole number.
            post_hour = row[4]
            if not float(post_hour).is_integer():
                continue
            now = datetime.datetime.now()
            post_datetime = datetime.datetime(year=now.year,
                                              month=now.month,
                                              day=now.day,
                                              hour=int(post_hour)).timestamp()
            # Only rows scheduled after the previous attempt and before now.
            if post_datetime <= last_attempt_timestamp or post_datetime >= now.timestamp():
                continue
            # Columns 0-2 are per-network flags (vk / tg / fb).
            social_networks = ''
            if str(row[0]).upper() == 'ДА':
                social_networks += 'vk,'
            if str(row[1]).upper() == 'ДА':
                social_networks += 'tg,'
            if str(row[2]).upper() == 'ДА':
                social_networks += 'fb'
            # Columns 5/6 hold Drive links to the article text and image.
            text_filepath = get_article(get_id(row[5]), drive, tmpdirname)
            image_filepath = get_image(get_id(row[6]), drive, tmpdirname)
            post.post_to_social_networks(social_networks, image_filepath,
                                         text_filepath)
            # Mark the row as published before writing the sheet back.
            row[7] = 'да'
    body = {
        'values': values
    }
    service.spreadsheets().values().update(
        spreadsheetId=spreadsheet_id,
        range=range_name,
        valueInputOption=value_input_option,
        body=body).execute()
class GSheetsParser:
    """Parses a Bioschemas mapping Google Sheet into a spec description dict.

    Authenticates with pydrive on construction (cached credentials in
    spec2model/mycreds.txt) and uses gspread plus helpers defined elsewhere
    in this file (get_properties_in_hierarchy, get_hierarchy,
    get_mapping_properties, get_formatted_props).
    """

    # Class-level defaults; all are re-assigned per instance in __init__.
    gsheet_id = ''
    cred_file = ''
    gauth = "This variable will have the Google Authorization file"
    scope = []
    spec_metadata = {}
    bsc_specification = {}

    def __init__(self):
        self.gsheet_id = '1h0-fgqnRe25-tVCmu2yWNQjthLzgkW4a1TVNMpCABlc'
        #self.cred_file = 'client_secrets.json'
        #self.scope = ['https://spreadsheets.google.com/feeds', 'https://www.googleapis.com/auth/drive']
        self.spec_metadata = {}
        self.bsc_specification = {}
        creds_path = "spec2model/mycreds.txt"
        self.gauth = GoogleAuth()
        # Try to load saved client credentials
        self.gauth.LoadCredentialsFile(creds_path)
        if self.gauth.credentials is None:
            # Authenticate if they're not there
            self.gauth.LocalWebserverAuth()
        elif self.gauth.access_token_expired:
            # Refresh them if expired
            self.gauth.Refresh()
        else:
            # Initialize the saved creds
            self.gauth.Authorize()
        # Save the current credentials to a file
        self.gauth.SaveCredentialsFile(creds_path)

    def set_gsheet_id(self, gsheet_id):
        self.gsheet_id = gsheet_id

    def set_spec_metadata(self, spec_metadata):
        self.spec_metadata = spec_metadata

    def check_url(self, spec_url):
        """Return spec_url if it resolves, or the sentinel 'err_404'."""
        if spec_url is None:  # fix: was 'spec_url == None'
            return "err_404"
        r = requests.get(spec_url)
        # Fix: the original compared the Response object itself to 404
        # ('r == 404'), which is never true; check the HTTP status code.
        if r.status_code == 404:
            return "err_404"
        else:
            return spec_url

    def __get_mapping_description(self, mapping_sheet):
        """Build the description dict for one mapping sheet from spec_metadata
        plus a few cells read directly from the sheet."""
        mapping_description = {}
        mapping_description['name'] = self.spec_metadata['name']
        print("Parsing %s Google Sheet" % mapping_description['name'])
        mapping_description['g_mapping_file'] = self.spec_metadata[
            'g_mapping_file']
        mapping_description['spec_mapping_url'] = self.spec_metadata[
            'spec_mapping_url']
        mapping_description['status'] = self.spec_metadata['status']
        mapping_description['spec_type'] = self.spec_metadata['spec_type']
        mapping_description[
            'gh_folder'] = 'https://github.com/BioSchemas/specifications/tree/master/' + self.spec_metadata[
                'name']
        mapping_description[
            'gh_examples'] = 'https://github.com/BioSchemas/specifications/tree/master/' + self.spec_metadata[
                'name'] + '/examples'
        mapping_description[
            'gh_tasks'] = 'https://github.com/BioSchemas/bioschemas/labels/type%3A%20' + self.spec_metadata[
                'name']
        mapping_description[
            'edit_url'] = 'https://github.com/BioSchemas/specifications/tree/master/' + self.spec_metadata[
                'name'] + '/specification.html'
        mapping_description['use_cases_url'] = self.check_url(
            self.spec_metadata['use_cases_url'])
        mapping_description['version'] = self.spec_metadata['version']
        # Subtitle/description/parent type come from fixed cells in the sheet.
        mapping_description['subtitle'] = mapping_sheet.acell('B1').value
        mapping_description['description'] = mapping_sheet.acell('B2').value
        # A6 holds e.g. 'Parent: <Type>'; strip the 8-char prefix.
        mapping_description['parent_type'] = mapping_sheet.acell(
            'A6').value[8:].strip()
        return mapping_description

    def get_mapping_g_sheets(self):
        """Parse the configured sheet and return the full spec description."""
        client = gspread.authorize(self.gauth.credentials)
        print("Parsing %s file." % self.spec_metadata['g_mapping_file'])
        mapping_sheet = client.open_by_key(self.gsheet_id).get_worksheet(0)
        spec_description = self.__get_mapping_description(mapping_sheet)
        sdo_props = get_properties_in_hierarchy(
            spec_description['parent_type'])
        spec_description['hierarchy'] = get_hierarchy(sdo_props)
        print("Prepared schema.org properties for hierarchy %s" %
              str(spec_description['hierarchy']))
        print("Classifing %s properties" % spec_description['name'])
        mapping_props = get_mapping_properties(mapping_sheet,
                                               spec_description['spec_type'])
        formatted_props = get_formatted_props(sdo_props, mapping_props,
                                              spec_description['name'],
                                              spec_description['spec_type'])
        spec_description.update(formatted_props)
        return spec_description
class NdpGrdriveDate(Config):
    """Fetch NDP reporting workbooks (publisher, lead-gen, UK folder) from Google Drive."""

    def __init__(self):
        super(NdpGrdriveDate, self).__init__()
        # Point PyDrive at the project's client secret before building GoogleAuth.
        GoogleAuth.DEFAULT_SETTINGS['client_config_file'] = (
            '/home/groupm/mediaops-project/mediaops/Classes/DataReaders/client_secret.json')
        cred_path = '/home/groupm/mediaops-project/mediaops/Classes/DataReaders/mycreds.txt'
        self.gauth = GoogleAuth()
        self.gauth.LoadCredentialsFile(cred_path)
        if self.gauth.credentials is None:
            # No cached credentials: run the interactive browser flow.
            self.gauth.LocalWebserverAuth()
        elif self.gauth.access_token_expired:
            self.gauth.Refresh()
        else:
            self.gauth.Authorize()
        self.gauth.SaveCredentialsFile(cred_path)
        self.drive = GoogleDrive(self.gauth)

    def file_objects(self):
        """Resolve the Drive file objects: publisher file, lead file, UK folder listing."""
        # Publisher file (id taken from config slot 15).
        self.file_obj_publisher_file = self.drive.CreateFile(
            {'id': self.section_value[15]})
        print('title: {}, id: {}'.format(self.file_obj_publisher_file['title'],
                                         self.file_obj_publisher_file['id']))
        print('downloading to {}'.format(self.file_obj_publisher_file))
        # Lead file (US/CA, id from config slot 16).
        self.file_obj_lead_file = self.drive.CreateFile(
            {'id': self.section_value[16]})
        print('title: {}, id: {}'.format(self.file_obj_lead_file['title'],
                                         self.file_obj_lead_file['id']))
        print('downloading to {}'.format(self.file_obj_lead_file))
        # UK files: every file inside the fixed UK folder.
        self.file_list_uk = self.drive.ListFile(
            {'q': "'1gzbtbqWyQEbJCfyRuYGsXKr4boOGdsrh' in parents"}).GetList()

    def download_files(self):
        """Download the two fixed workbooks into the configured directory (slot 12)."""
        xlsx_mime = 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        self.file_obj_publisher_file.GetContentFile(
            self.section_value[12] + 'Sage Global - Publisher Data - Daily.xlsx',
            mimetype=xlsx_mime)
        self.file_obj_lead_file.GetContentFile(
            self.section_value[12] + 'NDP - Sage NA Lead Gen - Content Synd Tracker.xlsx',
            mimetype=xlsx_mime)

    def download_uk_folder(self):
        """Download every file listed in the UK folder as .xlsx (slot 14 directory)."""
        for entry in self.file_list_uk:
            print('title: {}, id: {}'.format(entry['title'], entry['id']))
            fname = entry['title']
            print('downloading to {}'.format(fname))
            drive_file = self.drive.CreateFile({'id': entry['id']})
            drive_file.GetContentFile(
                self.section_value[14] + fname + ".xlsx",
                mimetype=
                'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet')

    def main(self):
        """Run the full fetch: resolve file objects, then download everything."""
        self.file_objects()
        self.download_files()
        self.download_uk_folder()
def auth_and_save_credential():
    """Run the interactive OAuth flow once and persist the credentials to disk."""
    auth = GoogleAuth()
    auth.LocalWebserverAuth()
    auth.SaveCredentialsFile("credentials.txt")
class FileUploader:
    """Upload Gmail attachments matching search queries into a Google Drive folder."""

    # Default user is authenticated user.
    user_id = 'me'
    query_list = []
    google_auth = None
    execute_date = date.today().strftime("%B %d, %Y")
    folder_name = ('Files uploaded ' + date.today().strftime("%B %d, %Y"))
    emails_ids = []

    def __init__(self):
        """
        Authenticate user for first time then build services.
        First authentication will require user input in browser.
        """
        self.authentication()
        self.mail_service = build(
            'gmail', 'v1', credentials=self.google_auth.credentials)  # Gmail API
        self.drive_service = build(
            'drive', 'v3', credentials=self.google_auth.credentials)  # Drive API
        self.py_drive = GoogleDrive(self.google_auth)  # PyDrive Drive API

    def authentication(self):
        """Run the interactive OAuth flow and authorize the client."""
        self.google_auth = GoogleAuth()
        self.google_auth.LocalWebserverAuth()
        self.google_auth.Authorize()
        # BUG FIX: removed stray debug statement `c = 0`.

    def refresh_services(self):
        """Re-authenticate and rebuild the Gmail/Drive service objects."""
        self.authentication()
        # TODO Check if need to recreate services
        self.mail_service = build(
            'gmail', 'v1', credentials=self.google_auth.credentials)  # Gmail API
        self.drive_service = build(
            'drive', 'v3', credentials=self.google_auth.credentials)  # Drive API
        self.py_drive = GoogleDrive(self.google_auth)  # PyDrive Drive API

    def upload_files(self,
                     query_list=None,
                     folder_name=('Files uploaded ' +
                                  date.today().strftime("%B %d, %Y"))):
        """
        Execute file upload.

        Args:
            query_list: String list used to filter emails. If not specified
                then use empty list to avoid error.
            folder_name: Folder name in which files should be uploaded.
                If not specified then use default name:
                'Files uploaded ' + date.today.
        """
        if query_list is not None:
            self.query_list = query_list
        else:
            self.query_list = []
        # To avoid ended session: if token expired then refresh.
        self.refresh_services()
        # Update execute date and folder name:
        self.execute_date = date.today().strftime("%B %d, %Y")
        print('Today is ' + self.execute_date)
        self.folder_name = folder_name
        print('Attachments are uploaded to: "' + self.folder_name + '"')
        # Update emails_ids matching query_list:
        self.ids_of_messages_matching_query()
        # Search for folder ID with given folder name:
        folder_id = self.search_for_file_id(
            "mimeType='application/vnd.google-apps.folder'", self.folder_name)
        # Data from emails:
        attachment_data = self.get_attachments_ids(self.emails_ids)
        # Save stuff on drive and hard disk:
        self.save_attachments(attachment_data, folder_id, save=False)

    def ids_of_messages_matching_query(self):
        """
        List all Messages of the user's mailbox matching the query.

        Stores in self.emails_ids the de-duplicated Message IDs matching
        any query in self.query_list.
        """
        matches = []
        try:
            for query in self.query_list:
                response = self.mail_service.users().messages().list(
                    userId=self.user_id, q=query).execute()
                if 'messages' in response:
                    matches.extend(response.get('messages', []))
                while 'nextPageToken' in response:
                    page_token = response['nextPageToken']
                    response = self.mail_service.users().messages().list(
                        userId=self.user_id, q=query,
                        pageToken=page_token).execute()
                    matches.extend(response['messages'])
        except errors.HttpError as error:
            # BUG FIX: was `% {error}`, which formatted a one-element set.
            print('An error occurred: %s' % error)
        # Trick to remove duplicates and strip unnecessary data:
        matching_emails = [
            dict(tuples)
            for tuples in {tuple(d.items()) for d in matches}
        ]
        self.emails_ids = [i['id'] for i in matching_emails]

    def get_attachments_ids(self, emails_ids):
        """
        Get all attachment IDs from provided email IDs.

        Args:
            emails_ids: IDs of Messages containing attachments.
        Return:
            Dict: {'Emails IDs': [], 'Attachments IDs': [],
                   'Attachments file names': []}.
        """
        attachments_file_names = []
        emails_id = []
        attachment_ids = []
        mail_data = []  # BUG FIX: was unbound when the request below failed.
        try:
            # Iterate over emails_ids and get their data:
            mail_data = [
                self.mail_service.users().messages().get(
                    userId=self.user_id, id=ids, format='full').execute()
                for ids in emails_ids
            ]
        except errors.HttpError as error:
            print('An error occurred: %s' % error)
        print('Emails found: ' + str(len(mail_data)))
        for email in mail_data:
            payload = email.get("payload", {})
            parts = payload.get("parts", [])
            # parts[0] is the message body; attachments start at index 1.
            for part in parts[1:]:
                try:
                    filename = part.get('filename')
                    attachment_id = part['body']['attachmentId']
                    email_id = email['id']
                except KeyError:
                    print('KeyError in get_attachments_ids: '
                          'No attachment in email:')
                    print('ID: ' + email['id'])
                else:
                    attachments_file_names.append(filename)
                    attachment_ids.append(attachment_id)
                    emails_id.append(email_id)
        print('Attachments found: ' + str(len(attachment_ids)))
        # Return three parallel lists to iterate over when saving:
        return {
            'Emails IDs': emails_id,
            'Attachments IDs': attachment_ids,
            'Attachments file names': attachments_file_names
        }

    def save_attachments(self, attachment_data, drive_folder_id, save=False):
        # TODO rework of this function
        """
        Get and save attachments on user GDrive, with option to keep them
        on the local hard drive.

        Args:
            attachment_data: IDs of emails with attachments, attachment IDs
                and attachment file names.
            drive_folder_id: ID of the GDrive folder where attachments go.
            save: Keep files on hard disk: True/False.
        Return:
            Decoded attachment file contents.
        """
        files = []
        files_amount = 0
        # Index-based so the three parallel lists stay in step.
        for i in range(0, len(attachment_data['Attachments IDs'])):
            try:
                file = self.mail_service.users().messages().attachments().get(
                    userId=self.user_id,
                    messageId=attachment_data['Emails IDs'][i],
                    id=attachment_data['Attachments IDs'][i]).execute()
            except errors.HttpError as error:
                print('An error occurred: %s' % error)
            else:
                file_data = base64.urlsafe_b64decode(
                    file['data'].encode('UTF-8'))
                path = attachment_data['Attachments file names'][i]
                files.append(file_data)
                if not os.path.splitext(path)[1] == '.jpg' and path:
                    with open(path, 'bw') as f:
                        f.write(file_data)
                    # TODO upload file directly from drive API:
                    # Update - not possible for me ATM
                    drive_file = self.py_drive.CreateFile(
                        {'parents': [{'id': drive_folder_id}]})
                    drive_file.SetContentFile(path)
                    drive_file.Upload()
                    files_amount += 1
                    # Until PyDrive fixes upload(), workaround to release
                    # the file handle so it can be deleted:
                    drive_file.SetContentFile("nul")
                    if not save:
                        os.remove(path)
        print('Files saved: ' + str(files_amount))
        return files

    def search_for_file_id(self, type_of_file, name_of_file):
        """
        Output id of file or folder with exact name and matching type.
        If a folder doesn't exist then create one and return its ID.

        Args:
            type_of_file: Query used to filter file types:
                https://developers.google.com/drive/api/v3/search-files
            name_of_file: Exact name to match.
        Returns:
            Id of file or folder.
        """
        try:
            page_token = None
            while True:
                # BUG FIX: removed trailing comma that turned the response
                # into a 1-tuple.
                searched_file = self.drive_service.files().list(
                    q=type_of_file,
                    pageSize=100,
                    spaces='drive',
                    fields='nextPageToken, files(id,name)',
                    pageToken=page_token).execute()
                for name_value in searched_file['files']:
                    if name_value['name'] == name_of_file:
                        return name_value['id']
                # BUG FIX: advance pagination; original never updated
                # page_token and always broke after the first page.
                page_token = searched_file.get('nextPageToken')
                if page_token is None:
                    break
            # TODO if folder is trashed function should un-trash it
            # If the folder doesn't exist, create it and return its ID.
            if type_of_file == "mimeType='application/vnd.google-apps.folder'":
                return self.create_new_folder(name_of_file, [])
        except errors.HttpError as error:
            print('An error occurred: %s' % error)

    def create_new_folder(self, folder_name, parent_folder_id=None):
        """
        Create a folder on Google Drive.

        Args:
            folder_name: Name for the created folder.
            parent_folder_id (optional): Parent folder ID.
        Returns:
            The created folder's ID.
        """
        if not parent_folder_id:
            folder_metadata = {
                'name': folder_name,
                'mimeType': 'application/vnd.google-apps.folder'
            }
        else:
            folder_metadata = {
                'name': folder_name,
                'mimeType': 'application/vnd.google-apps.folder',
                'parents': [{
                    "kind": "drive#fileLink",
                    "id": parent_folder_id
                }]
            }
        try:
            folder = self.drive_service.files().create(
                body=folder_metadata, fields='id').execute()
            return folder['id']
        except errors.HttpError as error:
            print('An error occurred: %s' % error)

    # Part of old code, but it might get useful some day:
    @staticmethod
    def create_message(sender, to, subject, message_text):
        """
        Create a message for an email.

        Args:
            sender: Email address of the sender.
            to: Email address of the receiver.
            subject: The subject of the email message.
            message_text: The text of the email message.
        Returns:
            An object containing a base64url encoded email object.
        """
        message = MIMEText(message_text)
        message['to'] = to
        message['from'] = sender
        message['subject'] = subject
        message = base64.urlsafe_b64encode(message.as_bytes())
        return {'raw': message.decode('utf-8')}

    @staticmethod
    def send_message(mail_service, user_id, message):
        """
        Send an email message.

        Args:
            mail_service: Authorized Gmail API instance.
            user_id: User's email address. The special value "me" can be
                used to indicate the authenticated user.
            message: Message to be sent.
        Returns:
            Sent Message.
        """
        try:
            message = (mail_service.users().messages().send(
                userId=user_id, body=message).execute())
            print('Message Id: %s' % message['id'])
            return message
        except errors.HttpError as error:
            print('An error occurred: %s' % error)
# -*- coding: utf-8 -*-
# BUG FIX: the original cookie read "codin.g", which is not a valid PEP 263
# encoding declaration and so had no effect.
"""
Created on Mon Jul 20 15:26:17 2019

@author: Zekihan
"""
from pydrive.auth import GoogleAuth
from pydrive.drive import GoogleDrive

gauth = GoogleAuth()
# client_secrets.json needs to be in the same directory as the script.
gauth.LocalWebserverAuth()
drive = GoogleDrive(gauth)
class Drive:
    """Thin wrapper around PyDrive for recursive upload/download with
    size/timestamp-based skip logic."""

    def __init__(self):
        # import pydrive
        import pydrive
        # Remember the credentials file path (module-level `mycreds_path`),
        # then authenticate immediately.
        self.mycreds_path = mycreds_path
        self._authenticate()

    def _authenticate(self):
        """
        This function, once run, will open up a login window in a web browser.
        The user must then authenticate via email and password to authorize
        the API for usage with that particular account.

        Note that 'mycreds.txt' may just be an empty text file. This function
        will create the correct dictionary structure in the file upon
        completion.
        """
        # Requires a client_secrets.json file in the working directory, so
        # temporarily chdir into the module-level `directory`.
        old_cwd = os.getcwd()
        os.chdir(directory)
        # import
        from pydrive.auth import GoogleAuth
        from pydrive.drive import GoogleDrive
        from pydrive.files import GoogleDriveFile
        # load
        self.gauth = GoogleAuth()
        self.gauth.LoadCredentialsFile(self.mycreds_path)
        if self.gauth.credentials is None:
            # authenticate if credentials are not found
            self.gauth.LocalWebserverAuth()
        elif self.gauth.access_token_expired:
            # refresh credentials if they are expired
            self.gauth.Refresh()
        else:
            # initialize the saved credentials
            self.gauth.Authorize()
        # finish
        self.gauth.SaveCredentialsFile(self.mycreds_path)
        self.api = GoogleDrive(self.gauth)
        os.chdir(old_cwd)

    def _list_folder(self, *args, **kwargs):
        '''
        Legacy. Please use self.list_folder instead!
        - Blaise 2016.05.14
        '''
        return self.list_folder(*args, **kwargs)

    def _upload_file(self, filepath, parentid, overwrite=False,
                     delete_local=False, verbose=True):
        """Upload one local file, trashing a stale remote copy first."""
        self._authenticate()
        title = filepath.split(os.path.sep)[-1]
        # Check whether a remote file with the same title already exists.
        query = {'q': "'{}' in parents and trashed=false".format(parentid)}
        existing = None
        for candidate in self.api.ListFile(query).GetList():
            # Skip folders; only plain files count as matches.
            if 'folder' in candidate['mimeType']:
                continue
            if candidate['title'] == title:
                print(title, 'found in upload file')
                existing = candidate
        f = existing
        if f is not None:
            # Decide whether the remote copy is stale and must be replaced.
            remove = False
            statinfo = os.stat(filepath)
            # Size differs -> replace.
            if not int(statinfo.st_size) == int(f['fileSize']):
                remove = True
            # Local file modified after the remote copy -> replace.
            remote_stamp = f['modifiedDate'].split('.')[0]  # UTC
            remote_stamp = time.mktime(
                datetime.datetime.strptime(
                    remote_stamp, '%Y-%m-%dT%H:%M:%S').timetuple())
            local_stamp = os.path.getmtime(filepath)  # local
            local_stamp += time.timezone  # UTC
            if local_stamp > remote_stamp:
                remove = True
            # Explicit overwrite request always wins.
            if overwrite:
                remove = True
            if remove:
                f.Trash()
                f = None
        # Upload when no usable remote copy remains.
        if f is None:
            f = self.api.CreateFile({'title': title,
                                     'parents': [{"id": parentid}]})
            f.SetContentFile(filepath)
            f.Upload()
            f.content.close()
            if verbose:
                print('file uploaded from {}'.format(filepath))
        # Optionally delete the local copy once it is on Drive.
        if delete_local:
            os.remove(filepath)
        # finish
        return f['id']

    def create_folder(self, name, parentid):
        """
        Create a new folder in Google Drive.

        Attributes
        ----------
        name : string or list of string
            Name of new folder to be created or list of new folders and
            subfolders.
        parentID : string
            Google Drive ID of folder that is to be the parent of new folder.

        Returns
        -------
        string
            The unique Google Drive ID of the bottom-most newly created
            folder.
        """
        import time
        t = time.time()
        self._authenticate()
        print(time.time() - t, "Authenticate")
        t = time.time()
        # Normalize: a single name becomes a one-element path.
        if type(name) == str:
            name = [name]
        # Walk/create the chain of folders, deepest last.
        parent = parentid
        for n in name:
            # Reuse an existing folder with this name when present.
            query = {'q': "'{}' in parents and trashed=false and "
                          "mimeType contains 'folder'".format(parent)}
            found = False
            for f in self.api.ListFile(query).GetList():
                if f['title'] == n:
                    found = True
                    parent = f['id']
                    continue
            if found:
                continue
            # No folder with that name: create one.
            f = self.api.CreateFile(
                {'title': n,
                 "parents": [{"id": parent}],
                 "mimeType": "application/vnd.google-apps.folder"})
            f.Upload()
            parent = f['id']
            print(time.time() - t, "created", n)
            t = time.time()
        return parent

    def download(self, fileid, directory='cwd', overwrite=False, verbose=True):
        '''
        Recursively download from Google Drive into a local directory. By
        default, will not re-download if file passes following checks:

        1. same size as remote file

        2. local file last modified after remote file

        Parameters
        ----------
        fileid : str
            Google drive id for file or folder.
        directory : str (optional)
            Local directory to save content into. By default saves to cwd.
        overwrite : bool (optional)
            Toggle forcing file overwrites. Default is False.
        verbose : bool (optional)
            Toggle talkback. Default is True.

        Returns
        -------
        pydrive.files.GoogleDriveFile
        '''
        self._authenticate()
        # get directory
        if directory == 'cwd':
            directory = os.getcwd()
        # get file object
        f = self.api.CreateFile({'id': fileid})
        f_path = os.path.join(directory, f['title'])
        if f['mimeType'].split('.')[-1] == 'folder':  # folder
            # create folder
            if not os.path.isdir(f_path):
                os.mkdir(f_path)
            # fill contents
            for child_id in self._list_folder(fileid):
                self.download(child_id, directory=f_path)
        else:  # single file
            # check if file exists
            if os.path.isfile(f_path):
                remove = False
                statinfo = os.stat(f_path)
                # filesize different
                if not int(statinfo.st_size) == int(f['fileSize']):
                    remove = True
                # modified since creation
                remote_stamp = f['modifiedDate'].split('.')[0]  # UTC
                remote_stamp = time.mktime(
                    datetime.datetime.strptime(
                        remote_stamp, '%Y-%m-%dT%H:%M:%S').timetuple())
                local_stamp = os.path.getmtime(f_path)  # local
                local_stamp += time.timezone  # UTC
                if local_stamp < remote_stamp:
                    remove = True
                # overwrite toggle
                if overwrite:
                    remove = True
                # remove, or keep the up-to-date local copy and stop here
                if remove:
                    os.remove(f_path)
                else:
                    return f
            # download
            f.GetContentFile(f_path)
            if verbose:
                print('file downloaded to {}'.format(f_path))
        # finish
        return f

    def list_folder(self, folderid):
        """Return the IDs of all non-trashed children of *folderid*."""
        # adapted from https://github.com/googledrive/PyDrive/issues/37
        self._authenticate()
        query = {'q': "'{}' in parents and trashed=false".format(folderid)}
        return [child['id'] for child in self.api.ListFile(query).GetList()]

    def upload(self, path, parentid, overwrite=False, delete_local=False,
               verbose=True):
        '''
        Upload local file(s) to Google Drive.

        Parameters
        ----------
        path : str
            Path to local file or folder.
        parentid : str
            Google Drive ID of remote folder.
        overwrite : bool (optional)
            Toggle forcing overwrite of remote files. Default is False.
        delete_local : bool (optional)
            Toggle deleting local files and folders once uploaded. Default is
            False.
        verbose : bool (optional)
            Toggle talkback. Default is True.

        Returns
        -------
        driveid : str
            Google Drive ID of folder or file uploaded
        '''
        self._authenticate()
        if os.path.isfile(path):
            return self._upload_file(path, parentid, overwrite=overwrite,
                                     delete_local=delete_local,
                                     verbose=verbose)
        elif os.path.isdir(path):
            top_path_length = len(path.split(os.path.sep))
            # Walk bottom-up so folders can be removed after their files.
            for folder_path, _, file_names in os.walk(path, topdown=False):
                self._authenticate()
                print(folder_path)
                # create folder on google drive
                name = folder_path.split(os.path.sep)[top_path_length - 1:]
                folderid = self.create_folder(name, parentid)
                # upload files
                for file_name in file_names:
                    p = os.path.join(folder_path, file_name)
                    self._upload_file(p, folderid, overwrite=overwrite,
                                      delete_local=delete_local,
                                      verbose=verbose)
                # remove folder
                if delete_local:
                    os.rmdir(folder_path)
            # finish
            return folderid
        else:
            raise Exception('path {0} not valid in Drive.upload'.format(path))
def _save_and_upload(self, text, local_name, doc_title, txt_title):
    """Write *text* to a local file, mirror it to Drive as *doc_title*, and
    drop an (empty) *txt_title* entry into the 'something' folder.

    One helper replaces seven copy-pasted blocks of the original clickMethod.
    """
    local_path = "C:/Users/shadd/Documents/" + local_name
    # Persist the widget text locally (with-block guarantees the close).
    with open(local_path, "w+") as out:
        out.writelines(text)
    # A fresh interactive auth per section, matching the original behavior.
    g_login = GoogleAuth()
    g_login.LocalWebserverAuth()
    drive = GoogleDrive(g_login)
    with open(local_path, "r") as fie:
        file_drive = drive.CreateFile({'title': doc_title})
        file_drive.SetContentString(fie.read())
        file_drive.Upload()
    file_list = drive.ListFile({
        'q': "title contains '{}' and trashed=false".format(doc_title)
    }).GetList()
    print(file_list[0]['title'])  # should be the title of the file we just created
    folder = drive.ListFile({
        'q': "title = 'something' and trashed=false"
    }).GetList()[0]  # the target folder
    # NOTE: as in the original, this entry is created with no content set.
    placeholder = drive.CreateFile({
        'title': txt_title,
        'parents': [{'id': folder['id']}]
    })
    placeholder.Upload()

def clickMethod(self):
    """Upload every form field (subject, dates, assignment, homework, notes,
    notices) to Google Drive, one document per field.

    BUG FIX: the Notes section of the original searched Drive for 'Hom.docx'
    (copy-paste error) instead of 'Notes.docx'; it also closed file4 twice.
    """
    self._save_and_upload(self.Subject_line.text(), "Subjects.txt",
                          'Sub.docx', "Sub.txt")
    self._save_and_upload(self.Created_Line.text(), "Created.txt",
                          'Cre.docx', "Cre.txt")
    self._save_and_upload(self.DueDate_Line.text(), "Due.txt",
                          'Due.docx', "Due.txt")
    self._save_and_upload(self.Assignment_textbox.toPlainText(), "Ass.txt",
                          'Ass.docx', "Ass.txt")
    self._save_and_upload(self.Homework_textbox.toPlainText(), "Hom.txt",
                          'Hom.docx', "Hom.txt")
    self._save_and_upload(self.Notes_Textbox.toPlainText(), "Notes.txt",
                          'Notes.docx', "Notes.txt")
    self._save_and_upload(self.Notice_textbox.toPlainText(), "Notice.txt",
                          'Notice.docx', "Notice.txt")
def Authentication():
    """Run the interactive OAuth flow and return an authenticated Drive client."""
    auth = GoogleAuth()
    auth.LocalWebserverAuth()
    return GoogleDrive(auth)
# Add _URL to the end of each. It should be really clear that this # substitution key is for a URL. file_variables = map(lambda x: x + '_URL', file_variables) if len(files) != len(file_variables): raise AssertionError('Programming error: the number of files differs from ' 'the number of Bash variables for files.') gauth = GoogleAuth() if args.auth_type == 'cmdline': gauth.CommandLineAuth() elif args.auth_type == 'browser': # If it tries multiple ports it might not match the registered callback URL, # so only try one. gauth.LocalWebserverAuth(port_numbers=[8080]) else: raise AssertionError( 'Programming error: nothing to do for auth_type value ' '{0}'.format(args.auth_type)) drive = GoogleDrive(gauth) folder_name = "Downloads for Freenet" query = "title='{0}'and mimeType='application/vnd.google-apps.folder'"\ .format(folder_name) file_listing = drive.ListFile({'q': query}).GetList() if len(file_listing) != 1: raise RuntimeError( 'Could not find unique folder with the title "{0}".'.format(
class SListener(StreamListener):
    """Tweepy stream listener that archives statuses to timestamped JSON
    files and ships each completed file to Google Drive."""

    def __init__(self, api=None, fprefix='streamer'):
        self.api = api or API()
        self.counter = 0
        self.fprefix = fprefix
        self.fileString = ('./streaming_data/' + fprefix + '.' +
                           time.strftime('%Y%m%d-%H%M%S') + '.json')
        self.output = open(self.fileString, 'w')
        self.delout = open('./streaming_data/' + 'delete.txt', 'a')
        # For Google Drive integration.
        self.gauth = GoogleAuth()
        self.gauth.LocalWebserverAuth()
        self.drive = GoogleDrive(self.gauth)

    def on_data(self, data):
        """Dispatch raw stream payloads to the matching handler."""
        if 'in_reply_to_status' in data:
            self.on_status(data)
        elif 'delete' in data:
            delete = json.loads(data)['delete']['status']
            if self.on_delete(delete['id'], delete['user_id']) is False:
                return False
        elif 'limit' in data:
            if self.on_limit(json.loads(data)['limit']['track']) is False:
                return False
        elif 'warning' in data:
            warning = json.loads(data)['warnings']
            print(warning['message'])
            # BUG FIX: original returned the undefined name `false`,
            # which raised NameError instead of stopping the stream.
            return False

    def on_status(self, status):
        """Append a status; every 20000 statuses, upload and roll the file."""
        self.output.write(status + "\n")
        self.counter += 1
        if self.counter >= 20000:
            # Ship the completed file to Google Drive, then start a new one.
            file1 = self.drive.CreateFile()
            file1.SetContentFile(self.fileString)
            file1.Upload()  # Files.insert()
            os.remove(self.fileString)
            self.output.close()
            self.fileString = ('./streaming_data/' + self.fprefix + '.' +
                               str(time.strftime('%Y%m%d-%H%M%S')) + '.json')
            self.output = open(self.fileString, 'w')
            self.counter = 0
        return

    def on_delete(self, status_id, user_id):
        # Log deleted status IDs for later reconciliation.
        self.delout.write(str(status_id) + "\n")
        return

    def on_limit(self, track):
        sys.stderr.write(str(track) + "\n")
        return

    def on_error(self, status_code):
        # BUG FIX: status_code is an int; concatenating it to a str raised
        # TypeError before the error was ever reported.
        sys.stderr.write('Error: ' + str(status_code) + "\n")
        return False

    def on_timeout(self):
        sys.stderr.write("Timeout, sleeping for 60 seconds...\n")
        time.sleep(60)
        return
message: Message to be sent. Returns: Sent Message. """ message = (service.users().messages().send(userId=user_id, body=message).execute()) return message # In[168]: #GOOGLE DRIVE API gauth = GoogleAuth() gauth.LoadCredentialsFile("mycreds.txt") gauth.LocalWebserverAuth( ) # Creates local webserver and auto handles authentication. if gauth.credentials is None: gauth.LocalWebserverAuth() # Authenticate if they're not there elif gauth.access_token_expired: gauth.Refresh() # Refresh them if expired else: gauth.Authorize() # Initialize the saved creds gauth.SaveCredentialsFile( "mycreds.txt") # Save the current credentials to a file drive = GoogleDrive(gauth) # In[169]: #UPLOAD FILE SETTING def newFolder():
def __init__(self):
    """Authenticate interactively and bind a private GoogleDrive client."""
    gauth = GoogleAuth()
    gauth.LocalWebserverAuth()
    # Name-mangled attribute, as in the original.
    self.__drive = GoogleDrive(gauth)
def signup(self):
    """Run the local-webserver OAuth flow and return a GoogleDrive handle."""
    auth = GoogleAuth()
    auth.LocalWebserverAuth()
    return GoogleDrive(auth)
import pymysql import time import json import os from pydrive.auth import GoogleAuth from pydrive.drive import GoogleDrive folder_bank = '1LhUXLRN8oXn5GZ7O29dykmduijZeFgj4' bank_done = '1r4dfPe4frIyWFXq8OQO4gCxir0biUU5W' folder_toko = '1dPlHq3lAM-xMtHmKRbVuRElpwhzZZvNt' toko_done = '1-ee0AlZj1Zw3j0hy7j7uXaJiEbNuR0hr' first_boot = 1 gauth = GoogleAuth() gauth.LocalWebserverAuth() drive = GoogleDrive(gauth) def fileOperation(table, data, filename, operation, gauth): try: print("-- PROCESS %s --" % filename) gauth.LocalWebserverAuth() drive = GoogleDrive(gauth) try: filepath = './bankbackup/' + filename with open(filepath, 'r') as f: try: datajson = json.load(f) except:
def startDownloads(songsFolder):
    """Download every link marked downloaded=0 in ChartBase.db into
    *songsFolder*, importing each one and recording success (1) or
    failure (-1) back into the database.

    Google Drive links go through gDriveDownload; everything else through
    wget. A temporary folder is removed on any failure or interrupt.
    """
    gauth = GoogleAuth()  # Google Drive authentication
    gauth.LocalWebserverAuth()  # Needed only for initial auth
    drive = GoogleDrive(gauth)

    connection = sqlite3.connect('../ChartBase.db')
    cursor = connection.cursor()
    cursor.execute('SELECT * FROM links WHERE downloaded=0')
    links = cursor.fetchall()

    def _mark(url, status):
        # SECURITY FIX: parameterized query; the original interpolated the
        # URL into the SQL string, allowing injection via crafted URLs.
        cursor.execute('UPDATE links SET downloaded=? WHERE url=?',
                       (status, url))
        connection.commit()

    def _cleanup(tmpFolder, reason):
        if os.path.exists(tmpFolder):
            print(f'removing tmpFolder due to {reason}: {tmpFolder}')
            shutil.rmtree(tmpFolder)

    for link in links:
        url = link[0]
        source = link[1]
        urlDecoded = urllib.parse.unquote(url)
        domain = re.search(r'.*?://(.*?)/', urlDecoded).group(1)
        tmpFolder = os.path.join(songsFolder, 'tmp/')
        if not os.path.exists(tmpFolder):
            os.mkdir(tmpFolder)
        try:
            if 'drive.google' in domain:
                print(f'downloading from gDrive: {url}')
                gDriveDownload(drive, urlDecoded, tmpFolder)
            else:
                print(f'downloading: {url}')
                _ = wget.download(urlDecoded, tmpFolder)
        except (KeyboardInterrupt, SystemExit):
            _cleanup(tmpFolder, 'sysexit')
            raise
        except Exception:
            # Was a bare `except:`; narrowed so SystemExit et al. propagate.
            _mark(url, -1)
            _cleanup(tmpFolder, 'except')
        # Only import when the download left something behind.
        if os.path.exists(tmpFolder):
            print(f'importing: {url}')
            importDownloaded(songsFolder, url, source, connection)
            print(f'updating in db: {url}')
            _mark(url, 1)