def authorize(self):
    """Drive the OAuth2 authorization flow and persist the result.

    If a token of kind ``self.key_to_the_kingdom`` is already cached in
    ``self.oauth`` it is only refreshed; otherwise a full access-token
    exchange is run, prompting the user for a CLIENT_ID/CLIENT_SECRET
    pair when the project config does not already hold one.
    """
    self.project.log(
        "transaction",
        "Initiating OAUTH2 Protocol with " + self.config['TOKEN_ENDPOINT'],
        "info", True)
    # Presence of the cached key decides refresh vs. full token exchange.
    key_exists = self.oauth.get(self.key_to_the_kingdom)
    if not key_exists:
        self.project.log(
            "transaction",
            "No valid {} found...".format(self.key_to_the_kingdom),
            "warning", True)
        c_id = self.project.config.get("CLIENT_ID")
        c_secret = self.project.config.get("CLIENT_SECRET")
        if not c_id or not c_secret:
            # No stored application credentials: walk the user through
            # creating an OAuth2 app and collect the id/secret interactively.
            self.project.log(
                "transaction",
                "No CLIENT_ID or CLIENT_SECRET. Asking for user input",
                "warning", True)
            IO.put("You must configure your account for OAUTH 2.0")
            IO.put("Please visit {}".format(
                self.config["OAUTH_DASHBOARD"]))
            IO.put("& Create an OAUTH 2 API Application")
            Common.launch_browser(self.config['OAUTH_DASHBOARD'])
            client_id = IO.get("{}:".format(
                self.config["CLIENT_ID_ALIAS"]))
            client_secret = IO.get("{}:".format(
                self.config["CLIENT_SECRET_ALIAS"]))
            self.project.save("CLIENT_ID", client_id)
            self.project.save("CLIENT_SECRET", client_secret)
            # NOTE(review): this writes the client secret to the log in
            # clear text — confirm that is intended for the audit trail.
            self.project.log(
                "transaction",
                "Received {} and {} from user ({}) ({})".format(
                    self.config['CLIENT_ID_ALIAS'],
                    self.config['CLIENT_SECRET_ALIAS'],
                    client_id, client_secret),
                "info", True)
            self.get_access_token(client_id, client_secret)
        else:
            # Credentials already stored on the project: go straight to
            # the token exchange.
            self.get_access_token(self.project.config['CLIENT_ID'],
                                  self.project.config['CLIENT_SECRET'])
    else:
        # Usable token on file: refresh instead of re-authorizing.
        self.refresh(self.project.config['CLIENT_ID'],
                     self.project.config['CLIENT_SECRET'])
    self.project.save("OAUTH", self.oauth)
    self.project.log("transaction", "Authorization completed", "info", True)
def sync(self):
    """Acquire Dropbox files and/or their metadata for this project.

    Walks the file listing, queues each file's metadata JSON (and, in
    "full" mode, its content) on a threaded downloader, then runs the
    downloader to completion and logs the elapsed time.
    """
    d1 = datetime.now()
    d = Downloader.Downloader(self.project,
                              self.oauth_provider.http_intercept,
                              self._save_file,
                              self.oauth_provider.get_auth_header,
                              self.project.threads)
    if self.project.args.mode == "full":
        self.project.log("transaction", "Full acquisition initiated", "info", True)
    else:
        self.project.log("transaction", "Metadata acquisition initiated", "info", True)
    self.initialize_items()
    cnt = len(self.files)
    self.project.log("transaction",
                     "Total items queued for acquisition: " + str(cnt),
                     "info", True)
    self.metadata()
    for file in self.files:
        self.project.log("transaction", "Calculating " + file['path'], "info", True)
        # Directories carry no content or per-file metadata to fetch.
        if file['is_dir'] == False:
            # Default argument binds the *current* file now; a bare closure
            # over `file` would late-bind to the loop's final value.
            download_uri = lambda f=file: self._get_download_uri(f)
            metadata_download_uri = self.oauth_provider.config['API_ENDPOINT'] + '/metadata/auto' + file['path']
            parentmap = self._get_parent_mapping(file)
            filetitle = self._get_file_name(file)
            orig = os.path.basename(file['path'])
            if filetitle != orig:
                self.project.log(
                    "exception",
                    "Normalized '{}' to '{}'".format(orig, filetitle),
                    "warning", True)
            if 'bytes' in file:
                self.file_size_bytes += int(file['bytes'])
            # assert_path presumably returns a falsy value when the target
            # path is unusable — TODO confirm against Common.assert_path.
            save_metadata_path = Common.assert_path(
                os.path.normpath(os.path.join(
                    os.path.join(self.project.project_folders['metadata'], parentmap),
                    filetitle + ".json")),
                self.project)
            if save_metadata_path:
                self.project.log("transaction",
                                 "Queueing {} for download...".format(orig),
                                 "info", True)
                d.put(Downloader.DownloadSlip(metadata_download_uri, file,
                                              save_metadata_path, 'path'))
            # File content is only acquired in "full" mode.
            if self.project.args.mode == "full":
                save_download_path = Common.assert_path(
                    os.path.normpath(os.path.join(
                        os.path.join(self.project.project_folders['data'], parentmap),
                        filetitle)),
                    self.project)
                if save_download_path:
                    self.project.log("transaction",
                                     "Queueing {} for download...".format(orig),
                                     "info", True)
                    d.put(Downloader.DownloadSlip(download_uri, file,
                                                  save_download_path, 'path'))
    self.project.log(
        "transaction",
        "Total size of files to be acquired is {}".format(
            Common.sizeof_fmt(self.file_size_bytes, "B")),
        "highlight", True)
    if self.project.args.prompt:
        IO.get("Press ENTER to begin acquisition...")
    d.start()
    d.wait_for_complete()
    d2 = datetime.now()
    delt = d2 - d1
    self.project.log("transaction",
                     "Acquisition completed in {}".format(str(delt)),
                     "highlight", True)
def sync(self):
    """Acquire Gmail threads: raw mail in "full" mode, metadata always."""
    d1 = datetime.now()
    # Placeholders (the class itself); replaced by configured instances
    # below when a full acquisition is requested.
    self.d = Downloader.Downloader
    self.content_downloader = Downloader.Downloader
    self.meta_downloader = Downloader.Downloader(
        self.project, self.oauth_provider.http_intercept,
        self._save_metadata, self.oauth_provider.get_auth_header,
        self.project.threads)
    if self.project.args.mode == "full":
        self.project.log("transaction", "Full acquisition initiated", "info", True)
        self.d = Downloader.Downloader(
            self.project, self.oauth_provider.http_intercept,
            self._redirect_messages_to_save,
            self.oauth_provider.get_auth_header, self.project.threads)
        self.content_downloader = Downloader.Downloader(
            self.project, self.oauth_provider.http_intercept,
            self._save_raw_mail, self.oauth_provider.get_auth_header,
            self.project.threads)
    else:
        self.project.log("transaction", "Metadata acquisition initiated", "info", True)
    self.initialize_items()
    cnt = len(self.threads)
    self.project.log("transaction",
                     "Total threads queued for acquisition: {}".format(cnt),
                     "info", True)
    self.metadata()
    for thread in self.threads:
        # `thread` comes from the provider listing; presumably a Gmail API
        # thread resource with 'id' and 'snippet' keys — TODO confirm.
        self.project.log("transaction",
                         'Calculating "{}"'.format(thread['snippet']),
                         "info", True)
        savepath = ""
        if self.project.args.mode == "full":
            download_uri = self.get_thread_uri(thread, "minimal")
            self.d.put(Downloader.DownloadSlip(download_uri, thread, savepath, 'id'))
        meta_uri = self.get_thread_uri(thread, "metadata")
        self.meta_downloader.put(
            Downloader.DownloadSlip(meta_uri, thread, savepath, 'id'))
    if self.project.args.mode == "full":
        # First pass ("minimal" thread bodies) runs before content so the
        # size total can be reported — presumably _redirect_messages_to_save
        # feeds file_size_bytes; verify against that helper.
        self.d.start()
        self.d.wait_for_complete()
        self.project.log(
            "transaction",
            "Total size of mail to be acquired is {}".format(
                Common.sizeof_fmt(self.file_size_bytes, "B")),
            "highlight", True)
    self.mbox_dir = os.path.join(self.project.acquisition_dir, "mbox")
    os.makedirs(self.mbox_dir, exist_ok=True)
    if self.project.args.prompt:
        IO.get("Press ENTER to begin acquisition...")
    if self.project.args.mode == "full":
        self.content_downloader.start()
        self.content_downloader.wait_for_complete()
    # Metadata is downloaded in every mode.
    self.meta_downloader.start()
    self.meta_downloader.wait_for_complete()
    d2 = datetime.now()
    delt = d2 - d1
    self.project.log("transaction",
                     "Acquisition completed in {}".format(str(delt)),
                     "highlight", True)
def get_access_token(self, client_id, client_secret):
    """Run the OAuth2 authorization-code exchange for this provider.

    Opens the provider's consent page in a browser, asks the user to paste
    the authorization code back, trades it for tokens at the token
    endpoint, and saves the parsed result on the project.
    """
    response_type = 'code'
    # Consent-page query differs per provider.
    if self.provider == "google":
        auth_query = {
            'redirect_uri': self.config['REDIRECT_URI'],
            'response_type': response_type,
            'client_id': client_id,
            'scope': self.project.config['OAUTH_SCOPE'],
            'approval_prompt': 'force',
            'access_type': 'offline',
        }
    elif self.provider == "dropbox":
        auth_query = {
            'response_type': response_type,
            'client_id': client_id,
        }
    else:
        auth_query = {}
    consent_url = self.config['OAUTH_ENDPOINT'] + '?' + urllib.parse.urlencode(auth_query)
    Common.launch_browser(consent_url)
    auth_code = IO.get("Authorization Code:")
    # Exchange the pasted code for an access/refresh token pair.
    token_query = {
        'code': auth_code,
        'grant_type': 'authorization_code',
        'client_id': client_id,
        'client_secret': client_secret,
    }
    if self.provider == "google":
        token_query['scope'] = ''
        token_query['redirect_uri'] = self.config['REDIRECT_URI']
    body = urllib.parse.urlencode(token_query)
    raw_response = Common.webrequest(
        self.config['TOKEN_ENDPOINT'],
        {'content-type': 'application/x-www-form-urlencoded;charset=utf-8'},
        self.http_intercept, body)
    self.parse_token(json.loads(raw_response))
    self.project.save("OAUTH", self.oauth)
def authorize(self):
    """Authorize against the provider's OAuth2 endpoints.

    Refreshes the cached token when one exists; otherwise runs the full
    access-token flow, asking the user for application credentials if the
    project does not yet store a CLIENT_ID/CLIENT_SECRET pair.
    """
    self.project.log(
        "transaction",
        "Initiating OAUTH2 Protocol with " + self.config['TOKEN_ENDPOINT'],
        "info", True)
    cached_token = self.oauth.get(self.key_to_the_kingdom)
    if cached_token:
        # A usable token is already on file — a refresh is sufficient.
        self.refresh(self.project.config['CLIENT_ID'],
                     self.project.config['CLIENT_SECRET'])
    else:
        self.project.log(
            "transaction",
            "No valid {} found...".format(self.key_to_the_kingdom),
            "warning", True)
        stored_id = self.project.config.get("CLIENT_ID")
        stored_secret = self.project.config.get("CLIENT_SECRET")
        if stored_id and stored_secret:
            # Stored app credentials exist: exchange them for a token.
            self.get_access_token(stored_id, stored_secret)
        else:
            # Interactive setup: guide the user to create an OAuth2 app
            # and collect the credentials it issues.
            self.project.log(
                "transaction",
                "No CLIENT_ID or CLIENT_SECRET. Asking for user input",
                "warning", True)
            IO.put("You must configure your account for OAUTH 2.0")
            IO.put("Please visit {}".format(self.config["OAUTH_DASHBOARD"]))
            IO.put("& Create an OAUTH 2 API Application")
            Common.launch_browser(self.config['OAUTH_DASHBOARD'])
            entered_id = IO.get("{}:".format(self.config["CLIENT_ID_ALIAS"]))
            entered_secret = IO.get("{}:".format(self.config["CLIENT_SECRET_ALIAS"]))
            self.project.save("CLIENT_ID", entered_id)
            self.project.save("CLIENT_SECRET", entered_secret)
            self.project.log(
                "transaction",
                "Received {} and {} from user ({}) ({})".format(
                    self.config['CLIENT_ID_ALIAS'],
                    self.config['CLIENT_SECRET_ALIAS'],
                    entered_id, entered_secret),
                "info", True)
            self.get_access_token(entered_id, entered_secret)
    self.project.save("OAUTH", self.oauth)
    self.project.log("transaction", "Authorization completed", "info", True)
def get_access_token(self, client_id, client_secret):
    """Exchange an OAuth2 authorization code for tokens and store them.

    Launches the provider consent page, reads the authorization code from
    the user, POSTs it to the token endpoint, parses the JSON response via
    ``self.parse_token`` and persists ``self.oauth`` on the project.
    """
    response_type = 'code'
    query_string = {}
    if self.provider == "google":
        # Google: force the consent screen and request offline access so a
        # refresh token is issued.
        query_string = ({
            'redirect_uri': self.config['REDIRECT_URI'],
            'response_type': response_type,
            'client_id': client_id,
            'scope': self.project.config['OAUTH_SCOPE'],
            'approval_prompt': 'force',
            'access_type': 'offline'
        })
    elif self.provider == "dropbox":
        query_string = ({
            'response_type': response_type,
            'client_id': client_id
        })
    params = urllib.parse.urlencode(query_string)
    step1 = self.config['OAUTH_ENDPOINT'] + '?' + params
    Common.launch_browser(step1)
    code = IO.get("Authorization Code:")
    # Second leg: trade the pasted code for an access/refresh token pair.
    query_string = ({
        'code': code,
        'grant_type': 'authorization_code',
        'client_id': client_id,
        'client_secret': client_secret
    })
    if self.provider == "google":
        query_string['scope'] = ''
        query_string['redirect_uri'] = self.config['REDIRECT_URI']
    params = urllib.parse.urlencode(query_string)
    response = Common.webrequest(self.config['TOKEN_ENDPOINT'], {
        'content-type': 'application/x-www-form-urlencoded;charset=utf-8'
    }, self.http_intercept, params)
    json_response = json.loads(response)
    self.parse_token(json_response)
    self.project.save("OAUTH", self.oauth)
def sync(self):
    """Acquire Gmail threads: raw mail in "full" mode, metadata always."""
    started = datetime.now()
    full_mode = self.project.args.mode == "full"
    # Class placeholders; rebound to configured instances in full mode.
    self.d = Downloader.Downloader
    self.content_downloader = Downloader.Downloader
    self.meta_downloader = Downloader.Downloader(
        self.project, self.oauth_provider.http_intercept,
        self._save_metadata, self.oauth_provider.get_auth_header,
        self.project.threads)
    if full_mode:
        self.project.log("transaction", "Full acquisition initiated", "info", True)
        self.d = Downloader.Downloader(
            self.project, self.oauth_provider.http_intercept,
            self._redirect_messages_to_save,
            self.oauth_provider.get_auth_header, self.project.threads)
        self.content_downloader = Downloader.Downloader(
            self.project, self.oauth_provider.http_intercept,
            self._save_raw_mail, self.oauth_provider.get_auth_header,
            self.project.threads)
    else:
        self.project.log("transaction", "Metadata acquisition initiated", "info", True)
    self.initialize_items()
    total = len(self.threads)
    self.project.log("transaction",
                     "Total threads queued for acquisition: {}".format(total),
                     "info", True)
    self.metadata()
    # Queue every thread: minimal bodies only in full mode, metadata always.
    for mail_thread in self.threads:
        self.project.log("transaction",
                         'Calculating "{}"'.format(mail_thread['snippet']),
                         "info", True)
        placeholder_path = ""
        if full_mode:
            brief_uri = self.get_thread_uri(mail_thread, "minimal")
            self.d.put(Downloader.DownloadSlip(brief_uri, mail_thread,
                                               placeholder_path, 'id'))
        metadata_uri = self.get_thread_uri(mail_thread, "metadata")
        self.meta_downloader.put(
            Downloader.DownloadSlip(metadata_uri, mail_thread,
                                    placeholder_path, 'id'))
    if full_mode:
        self.d.start()
        self.d.wait_for_complete()
        self.project.log(
            "transaction",
            "Total size of mail to be acquired is {}".format(
                Common.sizeof_fmt(self.file_size_bytes, "B")),
            "highlight", True)
    self.mbox_dir = os.path.join(self.project.acquisition_dir, "mbox")
    os.makedirs(self.mbox_dir, exist_ok=True)
    if self.project.args.prompt:
        IO.get("Press ENTER to begin acquisition...")
    if full_mode:
        self.content_downloader.start()
        self.content_downloader.wait_for_complete()
    self.meta_downloader.start()
    self.meta_downloader.wait_for_complete()
    finished = datetime.now()
    elapsed = finished - started
    self.project.log("transaction",
                     "Acquisition completed in {}".format(str(elapsed)),
                     "highlight", True)
def sync(self):
    """Acquire Google Drive files and/or their metadata for this project.

    In "full" mode file content is queued on a threaded downloader, with
    an md5-based skip of files already acquired, and verified afterwards;
    in metadata-only mode just the per-file JSON metadata is written.
    """
    d1 = datetime.now()
    full_mode = self.project.args.mode == "full"
    d = None
    if full_mode:
        self.project.log("transaction", "Full acquisition initiated", "info", True)
        d = Downloader.Downloader(self.project,
                                  self.oauth_provider.http_intercept,
                                  self._save_file,
                                  self.oauth_provider.get_auth_header,
                                  self.project.threads)
    else:
        self.project.log("transaction", "Metadata acquisition initiated", "info", True)
    self.initialize_items()
    cnt = len(self.files)
    self.project.log("transaction",
                     "Total items queued for acquisition: " + str(cnt),
                     "info", True)
    self.metadata()
    # Trashed items are written into parallel trash/ and trash_metadata/ trees.
    trash_folder = os.path.join(self.project.acquisition_dir, "trash")
    trash_metadata_folder = os.path.join(self.project.acquisition_dir, "trash_metadata")
    for file in self.files:
        self.project.log("transaction", "Calculating " + file['title'], "info", True)
        download_uri = self._get_download_url(file)
        parentmap = self._get_parent_mapping(file, self.files)
        filetitle = self._get_file_name(file)
        if filetitle != file['title']:
            self.project.log("exception",
                             "Normalized '" + file['title'] + "' to '" + filetitle + "'",
                             "warning", True)
        if file['labels']['trashed'] == True:
            save_download_path = os.path.normpath(
                os.path.join(trash_folder, parentmap, filetitle))
            save_metadata_path = os.path.normpath(
                os.path.join(trash_metadata_folder, parentmap, filetitle + '.json'))
        else:
            save_download_path = os.path.normpath(
                os.path.join(self.project.project_folders["data"], parentmap, filetitle))
            save_metadata_path = os.path.normpath(
                os.path.join(self.project.project_folders["metadata"], parentmap,
                             filetitle + ".json"))
        save_download_path = Common.assert_path(save_download_path, self.project)
        save_metadata_path = Common.assert_path(save_metadata_path, self.project)
        if full_mode and save_download_path:
            v = {"remote_file": os.path.join(parentmap, file['title']),
                 "local_file": save_download_path}
            download_file = True
            if 'md5Checksum' in file:
                v['remote_hash'] = file['md5Checksum']
            if os.path.isfile(save_download_path):
                if 'md5Checksum' in file:
                    # BUGFIX: close the handle after hashing; the original
                    # leaked the open file object.
                    with open(save_download_path, 'rb') as existing:
                        file_hash = Common.hashfile(existing, hashlib.md5())
                    if file_hash == file['md5Checksum']:
                        download_file = False
                        self.project.log(
                            "exception",
                            "Local and remote hash matches for " + file['title'] +
                            " ... Skipping download", "warning", True)
                    else:
                        self.project.log(
                            "exception",
                            "Local and remote hash differs for " + file['title'] +
                            " ... Queuing for download", "critical", True)
                else:
                    self.project.log(
                        "exception",
                        "No hash information for file ' " + file['title'] + "'",
                        "warning", True)
            if download_file and download_uri:
                self.project.log("transaction",
                                 "Queueing " + file['title'] + " for download...",
                                 "info", True)
                d.put(Downloader.DownloadSlip(download_uri, file,
                                              save_download_path, 'title'))
                if 'fileSize' in file:
                    self.file_size_bytes += int(file['fileSize'])
            # If it's a file we can add it to verification file
            if download_uri:
                self.verification.append(v)
        if save_metadata_path:
            self._save_file(json.dumps(file, sort_keys=True, indent=4),
                            Downloader.DownloadSlip(download_uri, file,
                                                    save_metadata_path, 'title'),
                            False)
    self.project.log(
        "transaction",
        "Total size of files to be acquired is {}".format(
            Common.sizeof_fmt(self.file_size_bytes, "B")),
        "highlight", True)
    if self.project.args.prompt:
        IO.get("Press ENTER to begin acquisition...")
    if full_mode:
        # BUGFIX: in metadata-only mode `d` was left bound to the Downloader
        # *class*, so the unconditional start()/wait_for_complete() calls
        # raised TypeError; run the downloader only in full mode (matching
        # the Gmail sync's guard).
        d.start()
        d.wait_for_complete()
    d2 = datetime.now()
    delt = d2 - d1
    self.verify()
    self.project.log("transaction",
                     "Acquisition completed in {}".format(str(delt)),
                     "highlight", True)
def sync(self):
    """Acquire Google Drive files and/or their metadata for this project."""
    d1 = datetime.now()
    # Placeholder: `d` stays bound to the Downloader *class* unless a full
    # acquisition instantiates it below.
    d = Downloader.Downloader
    if self.project.args.mode == "full":
        self.project.log("transaction", "Full acquisition initiated", "info", True)
        d = Downloader.Downloader(self.project,
                                  self.oauth_provider.http_intercept,
                                  self._save_file,
                                  self.oauth_provider.get_auth_header,
                                  self.project.threads)
    else:
        self.project.log("transaction", "Metadata acquisition initiated", "info", True)
    self.initialize_items()
    cnt = len(self.files)
    self.project.log("transaction",
                     "Total items queued for acquisition: " + str(cnt),
                     "info", True)
    self.metadata()
    # Trashed items land in parallel trash/ and trash_metadata/ trees.
    trash_folder = os.path.join(self.project.acquisition_dir, "trash")
    trash_metadata_folder = os.path.join(self.project.acquisition_dir, "trash_metadata")
    for file in self.files:
        self.project.log("transaction", "Calculating " + file['title'], "info", True)
        download_uri = self._get_download_url(file)
        parentmap = self._get_parent_mapping(file, self.files)
        filetitle = self._get_file_name(file)
        if filetitle != file['title']:
            self.project.log("exception",
                             "Normalized '" + file['title'] + "' to '" + filetitle + "'",
                             "warning", True)
        if file['labels']['trashed'] == True:
            save_download_path = os.path.join(trash_folder, parentmap)
            save_metadata_path = os.path.join(trash_metadata_folder, parentmap)
            save_download_path = os.path.normpath(
                os.path.join(save_download_path, filetitle))
            save_metadata_path = os.path.normpath(
                os.path.join(save_metadata_path, filetitle + '.json'))
        else:
            save_download_path = os.path.normpath(
                os.path.join(os.path.join(self.project.project_folders["data"],
                                          parentmap), filetitle))
            save_metadata_path = os.path.normpath(
                os.path.join(os.path.join(self.project.project_folders["metadata"],
                                          parentmap), filetitle + ".json"))
        save_download_path = Common.assert_path(save_download_path, self.project)
        save_metadata_path = Common.assert_path(save_metadata_path, self.project)
        if self.project.args.mode == "full":
            if save_download_path:
                # Verification record for this file (local/remote pairing).
                v = {"remote_file": os.path.join(parentmap, file['title']),
                     "local_file": save_download_path}
                download_file = True
                if 'md5Checksum' in file:
                    v['remote_hash'] = file['md5Checksum']
                if os.path.isfile(save_download_path):
                    if 'md5Checksum' in file:
                        # Skip re-downloading when the local copy's md5
                        # already matches the remote checksum.
                        # NOTE(review): this open() handle is never closed.
                        file_hash = Common.hashfile(open(save_download_path, 'rb'),
                                                    hashlib.md5())
                        if file_hash == file['md5Checksum']:
                            download_file = False
                            self.project.log(
                                "exception",
                                "Local and remote hash matches for " + file['title'] +
                                " ... Skipping download", "warning", True)
                        else:
                            self.project.log(
                                "exception",
                                "Local and remote hash differs for " + file['title'] +
                                " ... Queuing for download", "critical", True)
                    else:
                        self.project.log(
                            "exception",
                            "No hash information for file ' " + file['title'] + "'",
                            "warning", True)
                if download_file and download_uri:
                    self.project.log("transaction",
                                     "Queueing " + file['title'] + " for download...",
                                     "info", True)
                    d.put(Downloader.DownloadSlip(download_uri, file,
                                                  save_download_path, 'title'))
                    if 'fileSize' in file:
                        self.file_size_bytes += int(file['fileSize'])
                # If it's a file we can add it to verification file
                if download_uri:
                    self.verification.append(v)
        if save_metadata_path:
            # Metadata JSON is written directly (not queued on `d`).
            self._save_file(json.dumps(file, sort_keys=True, indent=4),
                            Downloader.DownloadSlip(download_uri, file,
                                                    save_metadata_path, 'title'),
                            False)
    self.project.log(
        "transaction",
        "Total size of files to be acquired is {}".format(
            Common.sizeof_fmt(self.file_size_bytes, "B")),
        "highlight", True)
    if self.project.args.prompt:
        IO.get("Press ENTER to begin acquisition...")
    # NOTE(review): in metadata-only mode `d` is still the class, so these
    # two calls would fail — confirm callers only reach here in "full" mode,
    # or guard on the mode as the Gmail sync does.
    d.start()
    d.wait_for_complete()
    d2 = datetime.now()
    delt = d2 - d1
    self.verify()
    self.project.log("transaction",
                     "Acquisition completed in {}".format(str(delt)),
                     "highlight", True)
def sync(self):
    """Download Dropbox per-file metadata, plus file content in "full" mode."""
    t_start = datetime.now()
    downloader = Downloader.Downloader(
        self.project, self.oauth_provider.http_intercept, self._save_file,
        self.oauth_provider.get_auth_header, self.project.threads)
    if self.project.args.mode == "full":
        self.project.log("transaction", "Full acquisition initiated", "info", True)
    else:
        self.project.log("transaction", "Metadata acquisition initiated", "info", True)
    self.initialize_items()
    item_count = len(self.files)
    self.project.log("transaction",
                     "Total items queued for acquisition: " + str(item_count),
                     "info", True)
    self.metadata()
    for entry in self.files:
        self.project.log("transaction", "Calculating " + entry['path'], "info", True)
        # Only files are queued; directories have nothing to fetch.
        if entry['is_dir'] == False:
            # Bind the current entry via a default argument so the deferred
            # call resolves this file, not the loop's final one.
            content_uri = lambda f=entry: self._get_download_uri(f)
            meta_uri = self.oauth_provider.config['API_ENDPOINT'] + '/metadata/auto' + entry['path']
            parent_rel = self._get_parent_mapping(entry)
            clean_name = self._get_file_name(entry)
            raw_name = os.path.basename(entry['path'])
            if clean_name != raw_name:
                self.project.log(
                    "exception",
                    "Normalized '{}' to '{}'".format(raw_name, clean_name),
                    "warning", True)
            if 'bytes' in entry:
                self.file_size_bytes += int(entry['bytes'])
            meta_path = Common.assert_path(
                os.path.normpath(os.path.join(
                    os.path.join(self.project.project_folders['metadata'], parent_rel),
                    clean_name + ".json")),
                self.project)
            if meta_path:
                self.project.log("transaction",
                                 "Queueing {} for download...".format(raw_name),
                                 "info", True)
                downloader.put(Downloader.DownloadSlip(meta_uri, entry,
                                                       meta_path, 'path'))
            if self.project.args.mode == "full":
                data_path = Common.assert_path(
                    os.path.normpath(os.path.join(
                        os.path.join(self.project.project_folders['data'], parent_rel),
                        clean_name)),
                    self.project)
                if data_path:
                    self.project.log("transaction",
                                     "Queueing {} for download...".format(raw_name),
                                     "info", True)
                    downloader.put(Downloader.DownloadSlip(content_uri, entry,
                                                           data_path, 'path'))
    self.project.log(
        "transaction",
        "Total size of files to be acquired is {}".format(
            Common.sizeof_fmt(self.file_size_bytes, "B")),
        "highlight", True)
    if self.project.args.prompt:
        IO.get("Press ENTER to begin acquisition...")
    downloader.start()
    downloader.wait_for_complete()
    t_end = datetime.now()
    self.project.log("transaction",
                     "Acquisition completed in {}".format(str(t_end - t_start)),
                     "highlight", True)