import ast
import json
import os
import sys
from datetime import datetime, timedelta

# NOTE: project helpers such as credentials, googleDrive, bytes2human, get_service,
# fetch_messages, get_wakatime_data, retrieve_all_files, get_shared_folder_id,
# get_file_id_in_shared_folder_for_delete, get_credentials_list and delete_file
# are assumed to be defined or imported elsewhere in this project.


def check_capacity_of_google_drive():
    """Sum the remaining quota of every donated account listed in config.json."""
    if os.path.isfile("config.json"):
        f = open("config.json", "r")
        readJSON = json.load(f)
        f.close()
        accountList = readJSON[0]['donation']
        total_remain_quota = 0
        total_quota = 0
        while len(accountList) != 0:
            service = credentials.get_service(accountList.pop(0))
            about = service.about().get().execute()
            total_quota_in_drive = int(about['quotaBytesTotal'])
            used_quota = int(about['quotaBytesUsed'])
            remain_quota = total_quota_in_drive - used_quota
            total_remain_quota += remain_quota
            total_quota += 15  # each free account contributes 15 GB
        human_byte = bytes2human(total_remain_quota)
        print("\n[SYSTEM] Total remaining quota is {0} / {1}.0 GB".format(human_byte, total_quota))
    else:
        print("[ERROR ] config.json does not exist")
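# check_capacity_of_google_drive() above depends on a bytes2human() helper that is
# assumed to be defined or imported elsewhere in this project. A minimal sketch of
# such a helper (hypothetical name, to avoid clashing with the real one) could be:
def _bytes2human_sketch(n):
    """Render a byte count as a human-readable string, e.g. 15728640 -> '15.0 MB'."""
    symbols = ('KB', 'MB', 'GB', 'TB', 'PB')
    prefix = {s: 1 << ((i + 1) * 10) for i, s in enumerate(symbols)}
    for s in reversed(symbols):
        if n >= prefix[s]:
            return '%.1f %s' % (float(n) / prefix[s], s)
    return '%s B' % n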
def print_files_in_shared_folder(account):
    """Print every file stored under the shared folder (and its daily folders) of the account."""
    service = credentials.get_service(account)
    results = retrieve_all_files(service)
    folderID = get_shared_folder_id(service, account)
    cnt = 0
    daily_folder_id_list = []
    print("\n[SYSTEM] Print file list - '%s'" % account)
    print(" File Name (File ID)")
    print(' ------------------------------------------------')
    if not results:
        print("[SYSTEM] ------------ Shared folder is empty ------------")
    else:
        for item in results:
            if len(item['parents']) != 0:
                if item['parents'][0]['id'] == folderID:
                    originItemType = item['mimeType']
                    itemType = originItemType[-6:]
                    if itemType == "folder":
                        # Daily folders are collected so their contents can be printed below.
                        daily_folder_id_list.append(item['id'])
                    else:
                        print(' {0} ({1})'.format(item['title'], item['id']))
                for i in range(0, len(daily_folder_id_list)):
                    if item['parents'][0]['id'] == daily_folder_id_list[i]:
                        print(' {0} ({1})'.format(item['title'], item['id']))
                        cnt += 1
        if cnt == 0:
            print("[SYSTEM] ------------ Shared folder is empty ------------")
    print(' ------------------------------------------------\n')
def main():
    service = get_service()             # prepare Gmail API
    messages = fetch_messages(service)  # get all messages from WakaTime weekly
    data = get_wakatime_data(messages)  # get WakaTime data as a dictionary
    print("Saving data to 'wakatime.json'...")
    # write to json file
    with open('wakatime.json', 'w', encoding='utf-8') as f:
        json.dump(data, f, ensure_ascii=False, indent=4)
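# A hedged guess at the script entry point; the original file may invoke main()
# differently, so treat this guard as an assumption.
if __name__ == '__main__':
    main()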
def upload_metadata_on_system_log(infoList):
    # Serialize the metadata list and parse it back into plain Python objects
    serialized_dict = json.dumps(infoList)
    dictJSON = ast.literal_eval(serialized_dict)
    # Create upload.json from the metadata
    f = open('upload.json', 'w')
    json.dump(dictJSON, f, indent=4)
    f.close()
    uploadJSONPath = os.getcwd()
    service = credentials.get_service("silencenamu")
def delete_all_files_of_all_account():
    receivedCredential, accountList = get_credentials_list()
    for i in range(0, len(accountList)):
        service = credentials.get_service(accountList[i])
        items = get_file_id_in_shared_folder_for_delete(service, accountList[i])
        for item in items:
            delete_file(service, item['id'])
        print("[DELETE] Deleted files in google drive - '%s'\n" % accountList[i])
    if os.path.isfile("metadata.json"):
        os.remove("metadata.json")
        print("[DELETE] Deleted metadata.json")
    else:
        print("[ERROR ] metadata.json does not exist")
def print_files_in_account(account):
    service = credentials.get_service(account)
    results = service.files().list(maxResults=50).execute()
    items = results.get('items', [])
    cnt = 0
    print("\n[SYSTEM] Print all file list - '%s'" % account)
    print(" File Name (File ID)")
    print(' ------------------------------------------------')
    if not items:
        print("[SYSTEM] ------------ Shared folder is empty ------------")
    else:
        for item in items:
            print(' {0} ({1})'.format(item['title'], item['id']))
            cnt += 1
        if cnt == 0:
            print("[SYSTEM] ------------ Shared folder is empty ------------")
    print(' ------------------------------------------------\n')
    return items
def create_public_folder(account):
    service = credentials.get_service(account)
    body = {
        'title': "jigsaw",
        'mimeType': 'application/vnd.google-apps.folder'
    }
    folder = service.files().insert(body=body).execute()
    permission = {
        'value': '',
        'type': 'anyone',
        'role': 'reader'
    }
    # https://developers.google.com/drive/web/manage-sharing
    service.permissions().insert(fileId=folder['id'], body=permission).execute()
    print("[CREATE] Create shared folder in google drive")
    return folder
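# Hypothetical usage sketch (not part of the original code): create the shared
# "jigsaw" folder on every donated account listed in config.json, reusing the
# config layout read by check_capacity_of_google_drive() above.
def _create_public_folder_for_all_accounts():
    if not os.path.isfile("config.json"):
        print("[ERROR ] config.json does not exist")
        return
    with open("config.json", "r") as f:
        accountList = json.load(f)[0]['donation']
    for account in accountList:
        create_public_folder(account)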
def get_file_id_and_name_in_shared_folder(account):
    """Collect {chunkName: fileId} pairs for every chunk stored under the shared folder."""
    service = credentials.get_service(account)
    results = retrieve_all_files(service)
    folderID = get_shared_folder_id(service, account)
    cnt = 0
    daily_folder_id_list = []
    returnList = []
    print("[SYSTEM] Start to get file id and name for UPDATE")
    if not results:
        print("[SYSTEM] ------------ Shared folder is empty ------------")
    else:
        for item in results:
            if len(item['parents']) != 0:
                if item['parents'][0]['id'] == folderID:
                    originItemType = item['mimeType']
                    itemType = originItemType[-6:]
                    if itemType == "folder":
                        daily_folder_id_list.append(item['id'])
                    else:
                        entry = {}
                        chunkName = item['title'][12:]  # strip the date prefix from the title
                        entry[chunkName] = item['id']
                        returnList.append(entry)
                for i in range(0, len(daily_folder_id_list)):
                    if item['parents'][0]['id'] == daily_folder_id_list[i]:
                        entry = {}
                        chunkName = item['title'][11:]  # strip the date prefix from the title
                        entry[chunkName] = item['id']
                        returnList.append(entry)
                        cnt += 1
        if cnt == 0:
            print("[SYSTEM] ------------ Shared folder is empty ------------")
    print("[SYSTEM] Finished get file id and name for UPDATE - '{0}'".format(account))
    return returnList
def deleteFile(fileName):
    if os.path.isfile("metadata.json"):
        f = open("metadata.json", "r")
        readJSON = json.load(f)
        f.close()
        print("[SYSTEM] Read metadata, start removing chunk files")
        idx = 0
        item = 0
        while 1:
            if readJSON[item]['fileName'] == fileName:
                if readJSON[item]['indexOfChunk'] == idx:
                    lastIdx = readJSON[item]['numberOfChunks']
                    indexOfDot = fileName.index('.')
                    name = fileName[:indexOfDot]
                    # Delete the original copies of this chunk
                    for key_account in readJSON[item]['origin']:
                        account = key_account
                        service = credentials.get_service(account)
                        googleDrive.delete_file(service, readJSON[item]['origin'][account])
                        log = "delete_" + name + '_' + str(idx) + '_' + account + '_origin'
                        googleDrive.write_log(log)
                    # Delete the replicated copies of this chunk
                    for key_account in readJSON[item]['replication']:
                        account = key_account
                        service = credentials.get_service(account)
                        googleDrive.delete_file(service, readJSON[item]['replication'][account])
                        log = "delete_" + name + '_' + str(idx) + '_' + account + '_replication'
                        googleDrive.write_log(log)
                    print("[DELETE] Removed chunk({0}) file - {1}\n".format(idx + 1, readJSON[item]['chunkName']))
                    del readJSON[item]
                    idx += 1
                    item = 0
                    if idx == lastIdx:
                        print("[DELETE] Deleted file on google drive - '%s'" % fileName)
                        break
                else:
                    item += 1
                    if item > len(readJSON) - 1:
                        print("[ERROR ] This metadata is wrong! Please check your metadata")
                        print("         Metadata of {0}'s chunk({1}) file does not exist".format(fileName, idx))
                        sys.exit(0)
            else:
                item += 1
                if item > len(readJSON) - 1:
                    print("[ERROR ] This metadata is wrong! Please check your metadata")
                    print("         Metadata of {0}'s chunk({1}) file does not exist".format(fileName, idx))
                    sys.exit(0)
        if len(readJSON) == 0:
            os.remove("metadata.json")
            print("[DELETE] Deleted metadata.json")
        else:
            # Rewrite metadata.json without the deleted file's entries
            f = open('metadata.json', 'w')
            json.dump(readJSON, f, indent=4)
            f.close()
            print("[SYSTEM] Removed metadata of '%s'" % fileName)
    else:
        print("[ERROR ] metadata.json does not exist")
def uploadGoogledrive(accountSortList, account, flag):
    uploadfilePath = os.getcwd() + "/cache/"
    service = credentials.get_service(account)
    folderID = googleDrive.get_shared_folder_id(service, account)
    two_time = 0
    # Daily folders are named with a date one week ahead of the upload time
    dt_obj = datetime.now() + timedelta(days=7)
    nowDate = datetime.strftime(dt_obj, '%Y_%m_%d')
    daily_folder_id = googleDrive.check_daily_folder_and_get_id(service, account, nowDate, folderID)
    for i in range(0, len(accountSortList)):
        fn1 = accountSortList[i]['chunkName']
        fileName = nowDate + '_' + fn1
        garbage_log = account + ' ' + fileName
        googleDrive.write_garbage_log(garbage_log)
        if os.path.isfile(uploadfilePath + fn1):
            # upload_file(title after upload, file description, file type, local path of the file to upload)
            uploadFile = googleDrive.upload_file(service, daily_folder_id, "%s" % fileName, '', '', uploadfilePath + fn1)
            if uploadFile is None:
                print("[ERROR ] Failed upload Chunk({0})- '{1}'".format(accountSortList[i]['indexOfChunk'] + 1, fn1))
            else:
                if flag == "origin":
                    global infoList
                    log = "upload_" + fn1 + '_' + account + "_origin"
                    googleDrive.write_log(log)
                    if accountSortList[i]['origin'][account] == "null":
                        accountSortList[i]['origin'][account] = uploadFile['id']
                        accountSortList[i]['uploadTime'] = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
                        infoList.append(accountSortList[i])
                        print("[SYSTEM] Stored metadata of original Chunk({0})- '{1}'".format(accountSortList[i]['indexOfChunk'] + 1, fn1))
                elif flag == "replication":
                    global finished_infoList
                    log = "upload_" + fn1 + '_' + account + "_replication"
                    googleDrive.write_log(log)
                    if accountSortList[i]['replication'][account] == "null":
                        accountSortList[i]['replication'][account] = uploadFile['id']
                        # Delete the cached chunk once another account already holds a replica
                        for key in accountSortList[i]['replication']:
                            if key != account:
                                if accountSortList[i]['replication'][key] != "null":
                                    if fileName == uploadFile['title']:
                                        two_time += 1
                        if two_time == 1:
                            two_time = 0
                            finished_infoList.append(accountSortList[i])
                            print("[SYSTEM] Stored metadata of replication of Chunk({0})- '{1}'".format(accountSortList[i]['indexOfChunk'] + 1, fn1))
                            os.remove(uploadfilePath + fn1)
                            print("[DELETE] Removed already uploaded chunk file - '{0}'".format(fn1))
        else:
            print("[ERROR ] Failed upload Chunk({0})- '{1}'".format(accountSortList[i]['indexOfChunk'] + 1, fn1))
def delete_all_files_of_one_account(account):
    service = credentials.get_service(account)
    items = get_file_id_in_shared_folder_for_delete(service, account)
    for item in items:
        delete_file(service, item['id'])
    print("[DELETE] Deleted files in google drive - '%s'\n" % account)