def uploadtodrive(filename):
    """Upload a file from the local ``./uploads/`` directory to Google Drive.

    :param filename: Name of the file inside ``./uploads/``.
    :return: Shareable Google Drive "view" URL for the uploaded file.
    """
    # NOTE(review): a previous revision kept a large commented-out raw-HTTP
    # upload here that embedded a live OAuth bearer token in source control.
    # It has been removed; rotate that token if it has not been already.
    from pygdrive3 import service

    drive_service = service.DriveService('./client_secrets.json')
    drive_service.auth()
    # Destination folder id is hard-coded; TODO(review): consider making it
    # a parameter if other folders are ever needed.
    file_id = drive_service.upload_file(
        filename,
        './uploads/' + filename,
        "1sz_WI0MNEaHn5GPJxpg3rvyIrd6ujowE",
        mime_type="image/*",
    )
    file_location = "https://drive.google.com/file/d/" + file_id + "/view"
    print(file_location)
    return file_location
def update_storage(service, filenames): """ Update the storage folder in our google drive. :param service: A service object used to communicate with google's API :param filenames: A list of the filenames in cache_storage :return: None """ # Create a list of {file names, file ids} results = service.files().list( pageSize=50, fields="nextPageToken, files(id, name)").execute() items = results.get('files', [])[1:-1] # Deleting every file and folder in the google drive try: for item in items: service.files().delete(fileId=item['id']).execute() except IndexError: pass # Log in to the pydrive3 client to upload files drive_service = service3.DriveService('client_secrets.json') drive_service.auth() folders = drive_service.list_folders_by_name("storage") # Upload the new data to the storage in google drive for filename in filenames: test_file = drive_service.upload_file(filename, "cache_storage/" + filename, folders[0]['id']) drive_service.anyone_permission(test_file)
def _decode_b64_to(src, dest):
    # Read a base64-encoded file and write its decoded bytes to *dest*.
    with open(src, "rb") as encoded:
        payload = base64.b64decode(encoded.read())
    with open(dest, "wb") as out:
        out.write(payload)


def copy(filename, gfolder):
    """Upload *filename* to the Google Drive folder *gfolder*.

    Fetches (or reuses) base64-encoded service/OAuth credential files from a
    fixed remote host, decodes them locally, then authenticates and uploads.

    :param filename: Path of the local file to upload.
    :param gfolder: Google Drive folder id to upload into.
    :return: The id returned by the Drive client for the uploaded file.
    """
    # Service-account credentials: reuse a local copy if present, otherwise
    # download the base64-encoded blob, then always (re)decode it.
    if os.path.exists(".drive.json64"):
        base = ".drive.json64"
    else:
        base = wget.download(
            "http://aragorn.elo.utfsm.cl/~cristobal.nettle/.drive.json64")
    _decode_b64_to(base, ".drive.json")

    # OAuth client credentials, fetched only if not already decoded on disk.
    oFolder = './credentials/'
    oFile = oFolder + 'credentials.json'
    if not os.path.exists(oFile):
        if not os.path.exists(oFolder):
            os.mkdir(oFolder)
        base = wget.download(
            "http://aragorn.elo.utfsm.cl/~cristobal.nettle/.credentials.json64")
        _decode_b64_to(base, oFile)

    os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = ".drive.json"
    drive_service = service.DriveService('.drive.json')
    drive_service.auth()
    # NOTE(review): this joins the stem parts with no separator, so
    # "a.b.c" becomes "ab" — if dots should be kept, '.'.join was meant.
    # Preserved as-is to avoid changing uploaded display names.
    name = ''.join(f for f in filename.split('.')[:-1])
    return drive_service.upload_file(name, filename, gfolder)
def download_dataset_google_api():
    """Download the dataset file with a fixed Drive id into an in-memory buffer,
    printing progress as each chunk completes.

    :return: None (the downloaded bytes stay in the local ``fh`` buffer).
    """
    import io  # local import: the original used nonexistent ``os.io``

    drive_service = service.DriveService()
    file_id = '0BwwA4oUTeiV1UVNwOHItT0xfa2M'
    request = drive_service.files().get_media(fileId=file_id)
    # Bug fix: ``os.io.BytesIO`` raised AttributeError; io.BytesIO is correct.
    fh = io.BytesIO()
    downloader = MediaIoBaseDownload(fh, request)
    done = False
    while not done:
        status, done = downloader.next_chunk()
        # Bug fix: the original Python 2 print statement is a SyntaxError
        # under Python 3, which the rest of this file targets.
        print("Download %d%%." % int(status.progress() * 100))
def gdrive_func_folder():
    """List the contents of the shared 'Supplier profile' Drive folder and
    print how many of those entries are sub-folders.

    :return: The raw list of entry dicts returned by the Drive client.
    """
    drive_service = service.DriveService('client_desk.json')
    drive_service.auth()
    # Id of the "Supplier profile - document extraction - ML sample data
    # for Sheffield" shared folder.
    folder_list = drive_service.list_files_from_folder_id(
        '1FuUuJB61O5OIVewf_I-k3bG7wIAim29Z')
    if not folder_list:
        print('No files found.')
    else:
        print('Folders in Supplier profile :')
        # Count only the entries whose MIME type marks them as folders.
        folder_count = sum(
            1 for entry in folder_list
            if entry['type'] == 'application/vnd.google-apps.folder')
        print(folder_count)
    return folder_list
def __init__(self, out_dir, secrets_file=None):
    """Authenticate against the Drive and Sheets APIs and keep their handles.

    :param out_dir: Output directory retained on the instance.
    :param secrets_file: Optional path to the OAuth client-secrets file.
    """
    self.out_dir = out_dir
    self.secrets_file = secrets_file

    # Drive v3 handle.
    creds_drive = get_credentials("drive", secrets_file)
    self.drive_api = discovery.build(
        "drive", "v3", http=creds_drive.authorize(httplib2.Http()))

    # Sheets v4 handle (needs an explicit discovery-document URL).
    creds_sheets = get_credentials("sheets", secrets_file)
    self.sheets_api = discovery.build(
        "sheets", "v4",
        http=creds_sheets.authorize(httplib2.Http()),
        discoveryServiceUrl=(
            "https://sheets.googleapis.com/$discovery/rest?version=v4"))

    # Separate downloader client, used for raw XLSX downloads.
    downloader = service.DriveService(self.secrets_file)
    downloader.auth()
    self.drive_service = downloader.drive_service
def gdrive_func_folder():
    """Fetch every entry in the shared 'Supplier profile' Drive folder,
    print how many entries there are, and return them.

    :return: The list of entry dicts returned by the Drive client.
    """
    drive_service = service.DriveService('client_desk.json')
    drive_service.auth()
    # Id of the "Supplier profile - document extraction - ML sample data
    # for Sheffield" shared folder.
    folder_list = drive_service.list_files_from_folder_id(
        '1FuUuJB61O5OIVewf_I-k3bG7wIAim29Z')
    if not folder_list:
        print('No files found.')
    else:
        print('Folders or files in Supplier profile :')
        # Every entry counts here — folders and plain files alike — so the
        # original's unconditional counter equals the list length.
        print(len(folder_list))
    return folder_list