def upload_object(file, path, folder_id, service):
    """Upload a local file to a Google Drive folder.

    Args:
        file: File name; also used as the name of the Drive object.
        path: Directory containing the file.
        folder_id: Destination Drive folder ID.
        service: Authorized Drive v3 service object.
    """
    file_metadata = {'name': file, 'parents': [folder_id]}
    # BUG FIX: `path + file` silently built a wrong path whenever `path`
    # lacked a trailing separator; os.path.join handles both cases.
    media = MediaFileUpload(os.path.join(path, file))
    response = service.files().create(body=file_metadata,
                                      media_body=media,
                                      fields='id').execute()
    print(file + " {" + response.get('id') + "} is uploaded.")
def upload_file(self, file_url, name, parent_folder):
    """Download `file_url` to a temp file and upload it into `parent_folder`.

    Returns:
        409 if a file with the same name already exists in the folder,
        200 after a successful upload.

    Raises:
        requests.HTTPError: if the download fails.
    """
    file_existence = self.__check_existence(name, parent_folder, self.__TYPE_FILE)
    if len(file_existence) != 0:
        return 409
    result = requests.get(file_url)
    result.raise_for_status()
    # Build the temp path once; str(...) is the idiomatic spelling of the
    # duplicated `.__str__()` calls in the original.
    tmp_path = str(pathlib.PurePosixPath(self._TMP_DIR, name))
    with open(tmp_path, 'wb') as f:
        f.write(result.content)
    # NOTE(review): mimetype is hard-coded to image/jpeg regardless of the
    # downloaded content — confirm callers only upload JPEGs.
    file = MediaFileUpload(
        tmp_path,
        mimetype='image/jpeg',
        resumable=True
    )
    file_metadata = {
        'name': f'{name}',
        'parents': [parent_folder],
    }
    _ = self.service.files().create(body=file_metadata, media_body=file, ).execute()
    return 200
def upload_to_gdrive(g_folder_id, filename, target_path, target_name, mimetype, listing_exists=False):
    """Upload `filename` to Drive, optionally creating a target subfolder.

    Args:
        g_folder_id: Parent Drive folder ID.
        filename: Local path of the file to upload.
        target_path: Name of the subfolder to create when it does not exist.
        target_name: Name for the uploaded file on Drive.
        mimetype: MIME type of the file.
        listing_exists: When True, upload directly into `g_folder_id`
            instead of creating the subfolder.

    Returns:
        Tuple of (Drive view URL of the new file, folder ID used).
    """
    SCOPES = 'https://www.googleapis.com/auth/drive'
    target_folder_id = g_folder_id
    # Cached OAuth token; re-run the consent flow if missing/invalid.
    store = g_file.Storage('token.json')
    creds = store.get()
    if not creds or creds.invalid:
        flow = client.flow_from_clientsecrets('drive_credentials.json', SCOPES)
        creds = tools.run_flow(flow, store)
    service = build('drive', 'v3', http=creds.authorize(Http()))
    if not listing_exists:
        # CREATE TARGET FOLDER
        file_metadata = {
            'name': target_path,
            'parents': [g_folder_id],
            'mimeType': 'application/vnd.google-apps.folder'
        }
        file = service.files().create(body=file_metadata, fields='id').execute()
        target_folder_id = file.get('id')
        # BUG FIX: Python 2 print statements are a syntax error under
        # Python 3; converted to print() calls (same output).
        print('Folder ID: %s' % target_folder_id)
    file_metadata = {'name': target_name, 'parents': [target_folder_id]}
    media = MediaFileUpload(filename, mimetype=mimetype)
    file = service.files().create(body=file_metadata, media_body=media, fields='id').execute()
    print('File ID: %s' % file.get('id'))
    return 'https://drive.google.com/file/d/' + file.get('id'), target_folder_id
def upload_file(filename, mimeType):
    """Upload `filename` to the Drive root and return the new file's ID.

    The original discarded the API response; returning the ID is
    backward compatible (callers that ignored the return are unaffected).
    """
    service = authorize()
    file_metadata = {'name': filename}
    media = MediaFileUpload(filename, mimetype=mimeType)
    uploaded_file = service.files().create(body=file_metadata,
                                           media_body=media,
                                           fields='id').execute()
    return uploaded_file.get('id')
def uploadMedia(self, media, name='', mimetype='image/png'):
    """Perform a resumable upload of the local file at `media`.

    Returns the final API response once the upload completes.
    """
    upload = MediaFileUpload(media, mimetype=mimetype, resumable=True)
    request = self.lib.create(media_body=upload, body={'name': name})
    # Resumable uploads are driven chunk by chunk; next_chunk() yields a
    # non-None response only when the last chunk has been accepted.
    response = None
    while response is None:
        _, response = request.next_chunk()
    return response
def upload(file, email):
    """Upload the CSV at `file` to Drive under the name `email`.

    Prints and returns the new file's ID.
    """
    metadata = {'name': email}
    media = MediaFileUpload(f'{file}', mimetype='text/csv')
    # Use a distinct name for the response so the `file` parameter is
    # not shadowed.
    created = drive_service.files().create(body=metadata,
                                           media_body=media,
                                           fields='id').execute()
    print('File ID: %s' % created.get('id'))
    return created.get('id')
def upload(file_path, file_name, folder_id):
    """Upload `file_path` as `file_name` into Drive folder `folder_id`.

    Uses the module-level `service` object. Returns the API response
    (dict containing 'id'); the original discarded it, so returning it
    is backward compatible.
    """
    global service
    file_metadata = {
        'name': file_name,
        'parents': [folder_id]
    }
    media = MediaFileUpload(file_path)
    file = service.files().create(body=file_metadata, media_body=media, fields='id').execute()
    return file
def uploadd(filename, filepath, mime):
    """Upload `filepath` to Drive root under the name `filename` with
    MIME type `mime`, printing the resulting file ID."""
    global service
    metadata = {'name': filename}
    upload = MediaFileUpload(filepath, mimetype=mime)
    created = service.files().create(body=metadata,
                                     media_body=upload,
                                     fields='id').execute()
    print('File ID: %s' % created.get('id'))
def upload(service, file_name):
    """Import a local CSV into Drive as a native Google Sheet.

    Returns the new spreadsheet's file ID.
    """
    metadata = {
        'name': file_name,
        # Target type: Drive converts the uploaded CSV into a spreadsheet.
        'mimeType': 'application/vnd.google-apps.spreadsheet',
    }
    media = MediaFileUpload(file_name, mimetype='text/csv')
    created = service.files().create(body=metadata,
                                     media_body=media,
                                     fields='id').execute()
    return created.get('id')
def upload(self, drive_file):
    """Create `drive_file` (an object exposing .name/.parents/.path) on
    Drive and return the API response containing the new id."""
    media = MediaFileUpload(drive_file.path)
    return self.files.create(
        body={"name": drive_file.name, "parents": drive_file.parents},
        media_body=media,
        fields="id",
    ).execute()
def UploadCsvToGoogleDrive(self, service, folderID):
    """Upload <DataName>.csv from CsvDirectory into Drive folder `folderID`.

    Best-effort: failures are reported but not raised (as before).
    """
    file_metadata = {
        'name': self.DataName + '.csv',
        'parents': [folderID]
    }
    try:
        media = MediaFileUpload(self.CsvDirectory + self.DataName + '.csv', mimetype='text/csv')
        service.files().create(body=file_metadata, media_body=media, fields='id').execute()
    except Exception as exc:
        # BUG FIX: the bare `except:` also swallowed KeyboardInterrupt /
        # SystemExit and hid the failure cause entirely.
        print("Failed to upload to drive: " + self.DataName)
        print(exc)
def upload_file(drive_service, FILE_PATH, MIME_TYPE, name='test.jpeg'):
    """ Upload a file to Google Drive.
    x FILE_PATH : path of the file to upload
    x MIME_TYPE : output format of the file
    x name : name to give the file on Drive. Defaults to the historical
      hard-coded 'test.jpeg' for backward compatibility; pass a real name
      to upload anything else sensibly.
    For more informations : https://github.com/odeke-em/drive/wiki/List-of-MIME-type-short-keys """
    file_metadata = {'name': name}
    media = MediaFileUpload(FILE_PATH, mimetype=MIME_TYPE)
    file_service = drive_service.files().create(body=file_metadata, media_body=media, fields='id').execute()
    print('File ID: %s' % file_service.get('id'))
def upload_file(self, file_name, file_path, parent_id=None):
    """Resumable upload of `file_path` to Drive as `file_name`.

    Args:
        file_name: Name for the file on Drive.
        file_path: Local path to upload.
        parent_id: Optional parent folder ID.

    Returns:
        The final API response (dict containing 'id').
    """
    file_metadata = {'name': file_name}
    if parent_id:
        file_metadata['parents'] = [parent_id]
    media = MediaFileUpload(file_path, resumable=True)
    file = self.files().create(body=file_metadata, media_body=media, fields='id')
    media.stream()
    # BUG FIX: `status` was referenced in the except branch below before
    # ever being assigned — an exception on the very first next_chunk()
    # raised UnboundLocalError instead of printing the error.
    status = None
    response = None
    while response is None:
        try:
            status, response = file.next_chunk()
            if status:
                print("Uploaded %d%% of %s." % (int(status.progress() * 100), file_name))
        except Exception as e:
            print("Error uploading file %s: %s" % (file_name, e))
            print("status info:", status)
    print("Upload of %s Complete!" % file_name)
    return response
def upload(self, name, file_path, parent_folder, mimetype):
    """Upload `file_path` under `name` into `parent_folder`.

    Returns the API response dict (contains 'id'); logs and re-raises
    any upload failure.
    """
    service = self.create_service()
    # set file options such as name and parent directory
    file_metadata = {'name': name, "parents": parent_folder}
    # file type, e.g. image/jpeg
    media = MediaFileUpload(file_path, mimetype=mimetype)
    try:
        file = service.files().create(body=file_metadata, media_body=media, fields='id').execute()
    except Exception as err:
        logging.error(err)
        # BUG FIX: `raise (err)` read like a call and is just `raise err`;
        # a bare `raise` re-raises with the original traceback intact.
        raise
    return file
def upload(self, df):
    # Mirror each entry of `df` into Drive and record the new Drive id back
    # into the dataframe. `df` is presumably indexed by local Path with
    # columns parent / local_type / cloud_name — TODO confirm against caller.
    for path, row in df.iterrows():
        if row["parent"] is None:
            # Every entry must already know its Drive parent folder id.
            raise RuntimeError(f"parent cannot be null for {path}")
        if row["local_type"] in GoogleDrive.google_mimes:
            # Native Google types: the local file is a JSON stub holding the
            # source file_id; server-side copy it into the target folder.
            log.info(f'Copying file {path.name}')
            with path.open() as f:
                data = json.load(f)
            log.info(data['file_id'])
            new_file = self.files.copy(
                fileId=data["file_id"],
                body={
                    "name": row["cloud_name"],
                    "parents": [row["parent"]]
                },
                fields="id, webViewLink",
            ).execute()
            # Rewrite the stub so it now points at the freshly copied file.
            with path.open("w") as f:
                json.dump(
                    {
                        "url": new_file['webViewLink'],
                        "account_email": self.email_address,
                        "file_id": new_file["id"],
                    },
                    f,
                )
        elif row["local_type"] == "folder":
            # Folders have no content; create the metadata object only.
            log.info(f'Creating folder {path.name}')
            new_file = self.files.create(
                body={
                    "name": row["cloud_name"],
                    "mimeType": get_mime("folder"),
                    "parents": [row["parent"]],
                },
                fields="id",
            ).execute()
        else:
            # Ordinary file: stream its bytes up as-is.
            log.info(f'Uploading file {path.name}')
            media = MediaFileUpload(path)
            new_file = self.files.create(body={
                "name": row["cloud_name"],
                "parents": [row["parent"]]
            }, media_body=media, fields="id").execute()
        # Record the new Drive id so callers can persist the mapping.
        df.at[path, "id"] = new_file["id"]
    return df
def upload(cls, fpath, title, description=None, mimetype=None, folder=None):
    # Upload the local file at `fpath` to Drive, optionally into `folder`.
    # NOTE(review): `body` mixes non-Drive-v3 keys ('path', 'title',
    # 'mimetype') into the metadata dict — verify the API tolerates them.
    try:
        if os.path.exists(fpath):
            body = {
                'path': fpath,
                # Basename extraction assuming Windows '\\' separators.
                'name': fpath.split("\\")[len(fpath.split("\\")) - 1],
                'title': title,
                'description': None,
                'mimetype': None,
                'parents': None
            }
            if description:
                body['description'] = description
            if mimetype:
                body['mimetype'] = mimetype
            if folder:
                body['parents'] = folder
            media = MediaFileUpload(body.get('path'), resumable=True)
            # NOTE(review): files().get() on a missing id raises HttpError
            # rather than returning None, so this == None branch looks
            # unreachable — confirm before relying on the create_folder path.
            if cls.drive_service.files().get(fileId=cls.search(
                    folder)[0].get('id')).execute() == None:
                if body.get('parents') != None:
                    body['parents'] = [cls.create_folder(folder).get('id')]
            else:
                # Folder already exists remotely: reuse its id as the parent.
                body['parents'] = [cls.search(folder)[0].get('id')]
            upload = cls.drive_service.files().create(
                body=body, media_body=media, fields='id').execute()
            # Uncomment the following line to print the File ID
            print('File ID: %s' % upload.get('id'))
            return upload
        else:
            raise FileNotFoundError(
                str(fpath) + "does not exist in the given folder")
    except errors.HttpError as error:
        print("An error occurred during uploading: " + str(error))
        print_exc()
    except:
        # Best-effort catch-all: prints the traceback and returns None.
        print_exc()
def upload_file(file_object, file_data, credentials, path=False): service = build('drive', 'v3', credentials=credentials) # print(file_object) folder_metadata = { 'name': 'My Test Folder', 'mimeType': 'application/vnd.google-apps.folder' } cloudFolder = service.files().create(body=folder_metadata).execute() file_metadata = { 'name': file_data["file_name"], 'parents': [cloudFolder['id']] } try: if path: media = MediaFileUpload(file_object, mimetype=file_data["mimetype"], resumable=True) else: media = MediaIoBaseUpload(io.BytesIO(file_object), mimetype=file_data["mimetype"], resumable=True) cloudFile = service.files().create(body=file_metadata, media_body=media).execute() # print(cloudFile) file_id = cloudFile['id'] userEmail = "*****@*****.**" cloudPermissions = service.permissions().create(fileId=cloudFile['id'], body={ 'type': 'user', 'role': 'reader', 'emailAddress': userEmail }).execute() cp = service.permissions().list(fileId=cloudFile['id']).execute() # print(cp) except Exception as e: print(e) file_id = None return file_id
def upload_files(self, filenames):
    # Back up every non-folder file currently in the Drive folder, then
    # upload each of `filenames` into it.
    all_files = self.list_files()
    for file in all_files["files"]:
        if (file["mimeType"] != "application/vnd.google-apps.folder"):
            self.backupFile(file["id"])
    for filename in filenames:
        file_metadata = {
            "name": filename,
            # NOTE(review): metadata declares 'image/jpg' (a non-standard
            # mime; canonical is image/jpeg) while the media body below is
            # uploaded as 'text/csv' — one of the two looks wrong; confirm
            # which was intended before changing either.
            'mimeType': 'image/jpg',
            "parents": [self.folderid]
        }
        media = MediaFileUpload(filename, mimetype='text/csv', resumable=True)
        file = self.service.files().create(body=file_metadata, media_body=media, fields='id').execute()
def main():
    """List every file directly under the Drive root, then upload the
    local movies.json to Drive.
    """
    creds = None
    # The file token.pickle stores the user's access and refresh tokens, and is
    # created automatically when the authorization flow completes for the first
    # time.
    if os.path.exists('token.pickle'):
        with open('token.pickle', 'rb') as token:
            creds = pickle.load(token)
    # If there are no (valid) credentials available, let the user log in.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(
                'credentials.json', SCOPES)
            creds = flow.run_local_server(port=0)
        # Save the credentials for the next run
        with open('token.pickle', 'wb') as token:
            pickle.dump(creds, token)
    service = build('drive', 'v3', credentials=creds)
    # Call the Drive v3 API: page through all non-trashed children of 'root'.
    page_token = None
    while True:
        response = service.files().list(q="'root' in parents and trashed=false",
                                        spaces='drive',
                                        fields='nextPageToken, files(id, name, mimeType, webViewLink)',
                                        pageToken=page_token).execute()
        for file in response.get('files', []):
            # Process change
            print('Found file: %s (%s) (%s) (%s)' % (file['name'], file['id'], file['mimeType'], file['webViewLink']))
        # A missing nextPageToken means the listing is exhausted.
        page_token = response.get('nextPageToken', None)
        if page_token is None:
            break
    # Upload movies.json from the current working directory.
    file_metadata = {'name': 'movies.json'}
    media = MediaFileUpload('movies.json', mimetype='application/json')
    file = service.files().create(body=file_metadata, media_body=media, fields='id').execute()
    print('File ID: %s' % file.get('id'))
def main():
    """Authorize against Drive v3, then upload every *.csv found in the
    local customers directory as a Google Sheet into a fixed Drive folder.
    """
    creds = None
    # The file token.pickle stores the user's access and refresh tokens, and is
    # created automatically when the authorization flow completes for the first
    # time.
    if os.path.exists('token.pickle'):
        with open('token.pickle', 'rb') as token:
            creds = pickle.load(token)
    # If there are no (valid) credentials available, let the user log in.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(
                'credentials.json', SCOPES)
            creds = flow.run_local_server(port=0)
        # Save the credentials for the next run
        with open('token.pickle', 'wb') as token:
            pickle.dump(creds, token)
    service = build('drive', 'v3', credentials=creds)
    # NOTE(review): the trailing dot in this directory name looks odd —
    # confirm it is the real path and not a typo.
    mydir = 'c:/orange/customers.'
    listOfFiles = os.listdir(mydir)
    pattern = "*.csv"
    # Fixed destination folder on Drive.
    folder_ids = ['1nA51BY05rIvHwaGiTxsqUQ5BxCtxmgOY']
    for entry in listOfFiles:
        if fnmatch.fnmatch(entry, pattern):
            # The first 8 characters appear to be a date prefix — TODO confirm.
            date_file_name = entry[0:8] + 'customer.csv'
            file_metadata = {
                'name': date_file_name,
                # Drive converts the CSV into a native spreadsheet on upload.
                'mimeType': 'application/vnd.google-apps.spreadsheet',
                'parents': folder_ids
            }
            path = os.path.join(mydir, entry)
            media = MediaFileUpload(path, mimetype='text/csv')
            file = service.files().create(body=file_metadata, media_body=media, fields='id').execute()
            print('File ID: %s' % file.get('id'))
def uploadFile(self, _filename, _mimeType, _max_retries=5):
    """Upload `_filename` into the fixed Drive folder, deleting the local
    copy on success.

    Args:
        _filename: Local file to upload (also used as the Drive name).
        _mimeType: MIME type of the file.
        _max_retries: Maximum upload attempts (new, defaults to 5).

    Returns:
        1 on success, None when every attempt failed.
    """
    file_metadata = {
        'name': _filename,
        'parents': ["17NNaQFAd3YvUDGD0eWfSxWUyAw0dmi0c"]
    }
    # BUG FIX: the original retried forever via unbounded recursion
    # (self.uploadFile(...) from both the failure branch and a bare
    # except:), risking stack overflow on a persistent failure. Retry a
    # bounded number of times in a loop instead; the service is rebuilt
    # via self.__init__() before each new attempt, as before.
    for _ in range(_max_retries):
        try:
            media = MediaFileUpload(_filename, mimetype=_mimeType)
            fileUpload = self.service.files().create(body=file_metadata, media_body=media, fields='id').execute()
            if fileUpload:
                print('File ID: %s' % fileUpload.get('id'))
                os.remove(_filename)
                return 1
        except Exception:
            pass
        self.__init__()
    return None
def upload_files(filenames):
    """Basic usage of the Drive v3 API.
    Upload files to gdrive
    """
    log("Authorising google drive...")
    creds = None
    # The file token.pickle stores the user's access and refresh tokens, and is
    # created automatically when the authorization flow completes for the first
    # time.
    if os.path.exists('token.pickle'):
        with open('token.pickle', 'rb') as token:
            creds = pickle.load(token)
    # If there are no (valid) credentials available, let the user log in.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(
                'credentials.json', SCOPES)
            creds = flow.run_local_server(port=0)
        # Save the credentials for the next run
        with open('token.pickle', 'wb') as token:
            pickle.dump(creds, token)
    drive_service = build('drive', 'v3', credentials=creds)
    # NOTE(review): if DRIVE_FOLDER_ID is unset this becomes None and
    # 'parents' is [None] — confirm the env var is always defined.
    folder_id = os.environ.get("DRIVE_FOLDER_ID")
    log("Uploading files to google drive...")
    for filename in filenames:
        # Windows-style path split to recover the bare file name.
        name = filename.split('\\').pop()
        file_metadata = {'name': name, 'parents': [folder_id]}
        media = MediaFileUpload(filename, mimetype='image/png')
        file = drive_service.files().create(body=file_metadata, media_body=media, fields='id').execute()
        print('Uploaded %s, File Id: %s' % (name, file.get('id')))
    log("Uploading complete.")
def insert_file(service, title, description, parent_id, mime_type, filename):
    """Insert new file (Drive v2 `files().insert`).

    Args:
        service: Drive API service instance.
        title: Title of the file to insert, including the extension.
        description: Description of the file to insert.
        parent_id: Parent folder's ID.
        mime_type: MIME type of the file to insert.
        filename: Filename of the file to insert, or None for metadata only.

    Returns:
        Inserted file metadata if successful, None otherwise.
    """
    body = {'title': title, 'description': description, 'mimeType': mime_type}
    media_body = None
    # `x is not None` is the idiomatic form of `not x is None`.
    if filename is not None:
        media_body = MediaFileUpload(filename, mimetype=mime_type)
    pprint.pprint(body)
    pprint.pprint(media_body)
    # Set the parent folder.
    if parent_id:
        body['parents'] = [{'id': parent_id}]
    try:
        if filename is None:
            # Metadata-only insert.
            file = service.files().insert(body=body).execute()
        else:
            file = service.files().insert(body=body, media_body=media_body).execute()
        # Uncomment the following line to print the File ID
        # print('File ID: %s' % file['id'])
        return file
    except errors.HttpError as error:
        # BUG FIX: `except errors.HttpError, error` and the print
        # statements were Python 2 syntax — a SyntaxError under Python 3.
        print('An error occured: %s' % error)
        return None
def save_report(self, root_dir, data, saved_dir_name):
    """Write `data` as a timestamped JSON log locally, then upload it to
    the Drive folder `root_dir`.
    """
    file_name = f'Log_{datetime.now().strftime("%d-%m-%Y_%H-%M-%S")}.txt'
    file_path = str(pathlib.PurePosixPath(self._DEFAULT_BACKUP_DIR, str(saved_dir_name), str(file_name)))
    print(f'Saving Log File:\nLocaly: {file_name}\nRemotely: {file_path}')
    # BUG FIX: exists()/mkdir() was a TOCTOU race and failed on missing
    # intermediate directories; makedirs(exist_ok=True) covers both.
    os.makedirs(self._LOG_DIR, exist_ok=True)
    # Compute the local path once; str(...) replaces the un-idiomatic
    # duplicated .__str__() calls.
    local_path = str(pathlib.PurePosixPath(self._LOG_DIR, file_name))
    with open(local_path, 'w') as f:
        f.write(json.dumps(data))
    file = MediaFileUpload(
        local_path,
        mimetype='text/plain',
        resumable=True
    )
    file_metadata = {
        'name': f'{file_name}',
        'parents': [root_dir],
    }
    _ = self.service.files().create(body=file_metadata, media_body=file, ).execute()
def drive_upload(args, file_path, file_name, camera, now):
    """Upload a JPEG into a per-day (YYYYMMDD) subfolder of the camera's
    Drive folder, creating the subfolder if it does not exist.

    Returns:
        The create() response dict on success, False if the API returned
        an empty response.
    """
    folder_date = now.strftime('%Y%m%d')
    credentials = service_account.Credentials.from_service_account_file(
        args.credentials, scopes=SCOPES)
    service = build('drive', 'v3', credentials=credentials)
    # Look for an existing folder named after today's date.
    dir_list_result = service.files().list(
        q="name=\'{}\' and \'{}\' in parents".format(folder_date, camera['parents'][0]),
        fields='files(id, name)').execute()
    if not dir_list_result.get('files', []):
        file_metadata = {
            'name': folder_date,
            'mimeType': 'application/vnd.google-apps.folder',
            'parents': camera['parents']
        }
        dir_create_result = service.files().create(body=file_metadata, fields="id").execute()
        # BUG FIX: the fallback default was `{}` — a dict is never a valid
        # parent ID; let a missing id surface as None and fail loudly.
        parent = dir_create_result.get('id')
    else:
        parent = dir_list_result.get('files', [])[0]['id']
    # (The original guarded on `if service:` here, but build() had already
    # been dereferenced above, so the guard was dead code.)
    media = MediaFileUpload(file_path, mimetype='image/jpeg')
    print(parent)
    results = service.files().create(media_body=media, body={
        "name": file_name,
        "parents": [parent]
    }).execute()
    if results:
        return results
    else:
        return False
"codeblock": "{" " 'error_code':'beftn5009'" " 'error_code':'beftn5009'" " 'error_code':'beftn5009'" " 'error_code':'beftn5009'" " 'error_code':'beftn5009'" " 'error_code':'beftn5009'" " 'error_code':'beftn5009'" " 'error_code':'beftn5009'" " 'error_code':'beftn5009'" " 'error_code':'beftn5009'" " 'error_code':'beftn5009'" " 'error_code':'beftn5009'" "}" } ] } doc.render(context) doc.save("generated_doc.docx") body = {'name': 'TestDocX', 'mimeType': 'application/vnd.google-apps.document'} DriveService = BuildService() media = MediaFileUpload( "generated_doc.docx", mimetype= 'application/vnd.openxmlformats-officedocument.wordprocessingml.document') DocService = DriveService.files().create(body=body, media_body=media, fields='id').execute()
"url": Template_JSON[4]["URL_STRING"], "verb": Template_JSON[4]["HTTP_VERB"], "codeblock": json.dumps(Template_JSON[4]["RESPONSE_BODY"], indent=4, sort_keys=True) }] rendered_template = template.render( Doc={'title': 'Generated Documentation Example'}, endpointlist=api_endpoints) print(rendered_template) with open("some_new_file4.html", "w") as f: f.write(rendered_template) f.close() body = { 'name': 'GeneratedDocumentation', 'mimeType': 'application/vnd.google-apps.document' } DriveService = BuildService() media = MediaFileUpload("some_new_file4.html", mimetype='text/html') DocService = DriveService.files().create(body=body, media_body=media, fields='id').execute() # media = MediaFileUpload("C:\\Users\\Circle\\Desktop\\Test_Template_doc.docx", mimetype='application/vnd.openxmlformats-officedocument.wordprocessingml.document') # DocService = DriveService.files().create(body=body,media_body=media,fields='id').execute() print(DocService)
type=str, help='Name of the file to be backed up') args = parser.parse_args() SCOPES = ['https://www.googleapis.com/auth/drive.file'] SERVICE_ACCOUNT_FILE = './credentials2.json' creds = service_account.Credentials.from_service_account_file( SERVICE_ACCOUNT_FILE, scopes=SCOPES) if __name__ == "__main__": file_metadata = { 'name': args.f, 'description': 'Scheduled server backup', 'mimeType': 'application/tar' } service = build('drive', 'v3', credentials=creds) media = MediaFileUpload(args.f, mimetype='application/tar') cloudFile = service.files().create(body=file_metadata, media_body=media).execute() cloudPermissions = service.permissions().create(fileId=cloudFile['id'], body={ 'type': 'user', 'role': 'reader', 'emailAddress': '*****@*****.**' }).execute()
def upload_file(self, file_id, file_name, file_path, file_gid=None, parent_gid=None, progress_queue=Queue()):
    # Resumable upload (create, or update when file_gid is given) with
    # progress reporting via progress_queue and cooperative cancellation
    # via self.cancel_uploads[file_id] / self.is_canceled().
    # NOTE(review): the mutable default Queue() is shared across every call
    # that omits progress_queue — confirm that is intended.
    print("uploading file at:", file_path)
    self.cancel_uploads[file_id] = False
    file_metadata = {'name': file_name}
    if parent_gid and not file_gid:
        file_metadata['parents'] = [parent_gid]
    # Wait (up to 5 x 0.5 s) for the file to stop being locked by another
    # process: a same-path rename succeeds only once it is not in use.
    max_retries = 5
    while max_retries != 0:
        try:
            os.rename(file_path, file_path)
            break
        except Exception as e:
            # File was probably in use so create a new upload request
            print("file still in use, waiting...", e)
            time.sleep(.5)
            max_retries -= 1
    file = None
    media = MediaFileUpload(file_path, resumable=True)
    if file_gid:
        # Existing Drive file: replace its content in place.
        file = self.files().update(
            fileId=file_gid,
            body=file_metadata,
            media_body=media,
            fields='id'
        )
    else:
        file = self.files().create(
            body=file_metadata,
            media_body=media,
            fields='id')
    media.stream()
    progress = 0
    response = None
    progress_queue.put({"progress": 0, "in_failure": False})
    fail_count = 0
    # Drive resumable uploads finish when next_chunk() returns a response;
    # stop early on external cancellation or 10 accumulated failures.
    while response is None and not self.is_canceled(file_id) and fail_count < 10:
        try:
            status, response = file.next_chunk()
            if status:
                progress = status.progress()
                print("Uploaded %d%% of %s." % (int(progress * 100), file_name))
                progress_queue.put({"progress": progress, "in_failure": False})
        except Exception as e:
            print("Error uploading file %s: %s" % (file_name, e))
            progress_queue.put({"progress": progress, "in_failure": True})
            time.sleep(.5)
            if type(e) is HttpError:
                # An HTTP error invalidates the upload session; start a
                # fresh create request from scratch.
                media = MediaFileUpload(file_path, resumable=True)
                file = self.files().create(
                    body=file_metadata,
                    media_body=media,
                    fields='id')
                media.stream()
                progress_queue.put({"progress": 0, "in_failure": True})
            fail_count += 1
    # Sentinel telling the consumer no more progress events will arrive.
    progress_queue.put(False)
    return response
def update_file(service, file_name, file_id):
    """Replace the content of the Drive file `file_id` with the local
    file at `file_name`."""
    # update() differs from create(): it targets an existing fileId and
    # needs no metadata body here.
    media = MediaFileUpload(file_name)
    service.files().update(media_body=media, fileId=file_id).execute()
    print('Updated file "' + file_name + '"')