def get_dropbox_client(access_key):
    """Create a Dropbox client authenticated with *access_key*.

    The token is validated eagerly by fetching the current account, so an
    invalid key fails here rather than on first real use.

    Raises:
        AuthError: if Dropbox rejects the access key.
    """
    try:
        client = Dropbox(access_key)
        # Any API call validates the token; this one is cheap and read-only.
        client.users_get_current_account()
        return client
    except AuthError:
        print(f'Failed to authenticate using key {access_key}')
        # Bare raise keeps the original exception and traceback intact.
        raise
def display():
    """Render the usage dashboard for the logged-in Dropbox user.

    Requires 'access_token' in the session (400 otherwise).  If a scan job
    started by the same user already exists, reuse its cached results;
    otherwise fetch fresh account/quota info and enqueue a new scan.
    """
    if 'access_token' not in session:
        abort(400)
    access_token = session['access_token']

    if 'job' in session:
        job = get_job_from_key(session['job'], conn)
        # Only rely on a previous result if the same user is logged in
        # (same access_token).
        if job is not None and access_token == job.meta.get('access_token', None):
            return render_template('display.html',
                                   username=session['username'],
                                   quota=session['quota'],
                                   used=session['used'])

    try:
        client = Dropbox(access_token)
        # Dropbox() does not validate the token at construction time; the
        # first API call does, so it must be inside the try for a bad
        # token to yield 401 instead of an unhandled exception.
        account = client.users_get_current_account()
    except Exception:
        abort(401)

    session['username'] = account.name.display_name
    space_usage = client.users_get_space_usage()
    allocated, used = get_space_usage_info(space_usage)
    total_bytes = used
    session['used'] = human_readable(used)
    session['quota'] = human_readable(allocated)

    # Kick off the background walk and tag the job with the token so a
    # later request can verify ownership.
    job = q.enqueue(walk_entire_dropbox, access_token, total_bytes)
    job.meta['access_token'] = access_token
    job.save()
    update_progress(job, 0, "/")
    session['job'] = job.key

    return render_template('display.html',
                           username=session['username'],
                           quota=session['quota'],
                           used=session['used'])
def authenticate(self, **credentials):
    """Django auth-backend hook: authenticate via Dropbox credentials.

    Validates the access token against Dropbox, then finds or creates the
    matching local ``User``.  Returns the ``User`` instance.
    """
    #TODO user_id comes in here?
    account_id = credentials.get('account_id')
    access_token = credentials.get('access_token')
    client = Dropbox(access_token)
    info = client.users_get_current_account()
    # Django User.username has a max length of 30, so we can't store the
    # (longer) account_id directly — store a CRC32 hash instead.
    # Fix: crc32 requires bytes on Python 3, so encode the string first.
    account_id_hash = str(binascii.crc32(account_id.encode('utf-8')))
    try:
        user = User.objects.get(username=account_id_hash)
    except User.DoesNotExist:
        # First login: create an inactive placeholder account and notify
        # the site admins by mail (best effort, fail_silently).
        user = User.objects.create(username=account_id_hash,
                                   password='******',
                                   last_name=info.name.display_name,
                                   email=info.email,
                                   is_active=False)
        DropBoxInfo.objects.create(user=user, access_token=access_token)
        send_mail('A new Metabotnik user has registered',
                  'And the user %s is https://metabotnik.com/admin/auth/user/%s/' % (user.last_name, user.pk),
                  '*****@*****.**',
                  ['*****@*****.**'],
                  fail_silently=True)
    return user
def display():
    """Render the usage dashboard for the logged-in Dropbox user.

    Requires 'access_token' in the session (400 otherwise).  If a scan job
    started by the same user already exists, reuse its cached results;
    otherwise fetch fresh account/quota info and enqueue a new scan.
    """
    if 'access_token' not in session:
        abort(400)
    access_token = session['access_token']

    if 'job' in session:
        job = get_job_from_key(session['job'], conn)
        # Only rely on a previous result if the same user is logged in
        # (same access_token).
        if job is not None and access_token == job.meta.get('access_token', None):
            return render_template('display.html',
                                   username=session['username'],
                                   quota=session['quota'],
                                   used=session['used'])

    try:
        client = Dropbox(access_token)
        # Dropbox() does not validate the token at construction time; the
        # first API call does, so it must be inside the try for a bad
        # token to yield 401 instead of an unhandled exception.
        account = client.users_get_current_account()
    except Exception:
        abort(401)

    session['username'] = account.name.display_name
    space_usage = client.users_get_space_usage()
    allocated, used = get_space_usage_info(space_usage)
    total_bytes = used
    session['used'] = human_readable(used)
    session['quota'] = human_readable(allocated)

    # Kick off the background walk and tag the job with the token so a
    # later request can verify ownership.
    job = q.enqueue(walk_entire_dropbox, access_token, total_bytes)
    job.meta['access_token'] = access_token
    job.save()
    update_progress(job, 0, "/")
    session['job'] = job.key

    return render_template('display.html',
                           username=session['username'],
                           quota=session['quota'],
                           used=session['used'])
class DropboxStorage(Storage):
    """
    A storage class providing access to resources in a Dropbox folder.
    """

    def __init__(self, token=ACCESS_TOKEN, location=ROOT_FOLDER):
        if not token:
            raise ImproperlyConfigured("You must configure an access token at "
                                       "'settings.DROPBOX_ACCESS_TOKEN'.")
        self.client = Dropbox(token)
        self.account_info = self.client.users_get_current_account()
        self.location = location or DEFAULT_ROOT_FOLDER
        self.base_url = 'https://dl.dropboxusercontent.com/'

    def _get_abs_path(self, name):
        # Resolve *name* relative to the storage root folder.
        return os.path.realpath(os.path.join(self.location, name))

    def _open(self, name, mode='rb'):
        name = self._get_abs_path(name)
        remote_file = DropboxFile(name, self, mode=mode)
        return remote_file

    def _save(self, name, content):
        name = self._get_abs_path(name)
        directory = os.path.dirname(name)
        if not self.exists(directory) and directory:
            self.client.files_create_folder(directory)
        # response = self.client.files_get_metadata(directory)
        # if not response['is_dir']:
        #     raise IOError("%s exists and is not a directory." % directory)
        abs_name = os.path.realpath(os.path.join(self.location, name))
        self.client.files_upload(content.read(), abs_name)
        return name

    def delete(self, name):
        """Delete *name*; True on success, False when it did not exist."""
        name = self._get_abs_path(name)
        try:
            self.client.files_delete(name)
        except ApiError as e:
            if isinstance(e.error, DeleteError) \
                    and e.error.is_path_lookup() \
                    and e.error.get_path_lookup().is_not_found():
                # not found
                return False
            # some other API error
            raise e
        # deleted
        return True

    def exists(self, name):
        name = self._get_abs_path(name)
        try:
            self.client.files_get_metadata(name)
        except ApiError as e:
            if hasattr(e.error, 'is_path') \
                    and e.error.is_path() \
                    and e.error.get_path().is_not_found():
                # not found
                return False
            # some other API error
            raise e
        # found
        return True

    def listdir(self, path):
        path = self._get_abs_path(path)
        response = self.client.files_list_folder(path)
        directories = []
        files = []
        for entry in response.entries:
            if isinstance(entry, FolderMetadata):
                directories.append(os.path.basename(entry.path_display))
            elif isinstance(entry, FileMetadata):
                files.append(os.path.basename(entry.path_display))
        return directories, files

    def size(self, name):
        name = self._get_abs_path(name)
        return self.client.files_get_metadata(name).size

    def url(self, name):
        name = self._get_abs_path(name)
        return self.client.files_get_temporary_link(name).link

    def modified_time(self, name):
        name = self._get_abs_path(name)
        return self.client.files_get_metadata(name).server_modified

    def accessed_time(self, name):
        name = self._get_abs_path(name)
        # Note to the unwary, this is actually an mtime
        return self.client.files_get_metadata(name).client_modified

    def get_available_name(self, name, max_length=None):
        """
        Returns a filename that's free on the target storage system, and
        available for new content to be written to.
        """
        name = self._get_abs_path(name)
        dir_name, file_name = os.path.split(name)
        file_root, file_ext = os.path.splitext(file_name)
        # If the filename already exists, add an underscore and a number
        # (before the file extension, if one exists) to the filename until
        # the generated filename doesn't exist.
        count = itertools.count(1)
        while self.exists(name):
            # Fix: count.next() is Python 2 only; use next(count).
            _fn = "%s_%s%s" % (file_root, next(count), file_ext)
            name = os.path.join(dir_name, _fn)
        return name
class DropBox(QtGui.QMainWindow):
    """Main window: handles Dropbox OAuth login and token persistence."""

    def __init__(self):
        super().__init__()
        self.central_widget = QtGui.QStackedWidget()
        self.setCentralWidget(self.central_widget)
        self.setWindowTitle('DB PY-IIC')
        self.resize(700, 550)
        self.move(300, 100)

        # Options Widget
        self.widgetOptions = widgetOptions()
        self.central_widget.addWidget(self.widgetOptions)

        # Login Widget
        self.widgetLogin = widgetLogin()
        self.widgetLogin.pushButton.clicked.connect(self.authenticate)
        self.central_widget.addWidget(self.widgetLogin)

        # Verifies if the user already has an authorization token cached
        if 'db.token' not in listdir():
            self.widgetOptions.hide()
            self.widgetLogin.show()
        else:
            if self.load_token():
                self.widgetLogin.hide()
                self.widgetOptions.show()
                self.widgetOptions.dbx = self.dbx
                self.widgetOptions.thread.start()
            else:
                # Cached token is stale/corrupt — fall back to login.
                self.widgetLogin.show()
                self.widgetOptions.hide()

    def authenticate(self):
        """Finish the OAuth flow with the code typed by the user."""
        auth_code = self.widgetLogin.lineEdit.text()
        try:
            access_token, user_id = self.widgetLogin.auth_flow.finish(auth_code)
            self.dbx = Dropbox(access_token)
            user_info = self.dbx.users_get_current_account()
            self.widgetOptions.labelName.setText(user_info.name.display_name)
            self.widgetLogin.hide()
            self.widgetOptions.show()
            self.widgetOptions.dbx = self.dbx
            self.widgetOptions.thread.start()
            self.save_token(access_token)
        except Exception:
            # Was a bare `except:`, which also swallowed KeyboardInterrupt.
            self.widgetLogin.labelError.setText('Invalid code, try again.')

    def save_token(self, token):
        # NOTE(review): pickle is acceptable for a locally-created token
        # file, but never unpickle files from untrusted sources.
        with open('db.token', 'wb') as file:
            pickle.dump(token, file)

    def load_token(self):
        """Load the cached token and validate it; return True on success."""
        try:
            with open('db.token', 'rb') as file:
                access_token = pickle.load(file)
            self.dbx = Dropbox(access_token)
            user_info = self.dbx.users_get_current_account()
            self.widgetOptions.labelName.setText(user_info.name.display_name)
            return True
        except Exception:
            # Was a bare `except:`; any failure means "token unusable".
            return False
# redirect_to("/dropbox-auth-start")
# except CsrfException as e:
#     http_status(403)
# except NotApprovedException as e:
#     flash('Not approved? Why not?')
#     return redirect_to("/home")
# except ProviderException as e:
#     logger.log("Auth error: %s" % (e,))
#     http_status(403)

from dropbox import DropboxOAuth2FlowNoRedirect
from dropbox import Dropbox

# NOTE(review): app credentials are hard-coded; move them to environment
# variables before sharing or deploying this script.
APP_KEY = 'cbm74gzdx3jn00g'
APP_SECRET = 'chq2mprrc8ldtfg'

# Interactive, no-redirect OAuth2 flow: the user pastes the code manually.
auth_flow = DropboxOAuth2FlowNoRedirect(APP_KEY, APP_SECRET)
authorize_url = auth_flow.start()
print("1. Go to: " + authorize_url)
print("2. Click \"Allow\" (you might have to log in first).")
print("3. Copy the authorization code.")
auth_code = input("Enter the authorization code here: ").strip()

try:
    access_token, user_id = auth_flow.finish(auth_code)
except Exception as e:
    # Fix: previously execution fell through and raised NameError on
    # access_token below; abort explicitly with the error message.
    raise SystemExit('Error: %s' % (e,))

dbx = Dropbox(access_token)
print(dbx.users_get_current_account())
class Client(ApiConnection):
    """sregistry client backed by Dropbox; token comes from the environment."""

    def __init__(self, secrets=None, base=None, **kwargs):
        # Read the token from the environment and build self.dbx/self.account.
        # (_update_secrets returns nothing, so nothing to assign.)
        self._update_secrets()
        # Fix: super(ApiConnection, self) skipped ApiConnection.__init__ in
        # the MRO; the class itself must be the first argument.
        super(Client, self).__init__(**kwargs)

    def _speak(self):
        '''if you want to add an extra print (of a parameter, for example)
        for the user when the client initializes, write it here, e.g.:

        bot.info('[setting] value')
        '''
        if hasattr(self, 'account'):
            bot.info('connected to %s' % self.account.name.display_name)

    def _get_metadata(self, image_file=None, dbx_metadata=None):
        '''wrapper around the main client.get_metadata: first flatten a
        Dropbox FileMetadata into a dictionary of primitive values, then
        pass it on to the primary get_metadata function.

        Parameters
        ==========
        image_file: the full path to the image file that had metadata extracted
        dbx_metadata: the Dropbox FileMetadata to parse.
        '''
        metadata = dict()

        if dbx_metadata is not None:
            for key in dbx_metadata.__dir__():
                value = getattr(dbx_metadata, key)
                # Keep only primitive, serialization-friendly values.
                if type(value) in [str, datetime.datetime, bool, int, float]:
                    metadata[key.strip('_')] = value

        return self.get_metadata(image_file, names=metadata)

    def _update_secrets(self):
        '''look for a dropbox token in the environment at
        SREGISTRY_DROPBOX_TOKEN and, if found, create and validate a
        client. If not, print an error message and exit.
        '''
        # Retrieve the user token. Exit if not found.
        token = self._get_and_update_setting('SREGISTRY_DROPBOX_TOKEN')
        if token is None:
            bot.error('You must export SREGISTRY_DROPBOX_TOKEN to use client.')
            sys.exit(1)

        # Create the dropbox client
        self.dbx = Dropbox(token)

        # Verify that the account is valid
        try:
            self.account = self.dbx.users_get_current_account()
        except AuthError:
            bot.error('Account invalid. Exiting.')
            sys.exit(1)

    def __str__(self):
        # Fix: __str__ must return a string; returning type(self) made
        # str(client) raise TypeError.
        return str(type(self))

    def exists(self, path):
        '''determine if a path exists, return False if not.'''
        try:
            self.dbx.files_get_metadata(path)
            return True
        except Exception:
            # Was a bare `except:`; any API failure counts as "not found".
            return False
class DropboxServiceClass:
    """This class manages the authorization and management of a user's
    Dropbox account.

    Args:
        dropbox: A dropbox object with the ability to access a user's
            Dropbox account (None when no valid token could be loaded).
    """

    def __init__(self):
        self.dropbox = None
        access_token = self.get_access_token()
        # Fix: the original `!= '' or != 'Error'` is always True; only
        # build a client when the token is neither empty nor the error
        # sentinel.
        if access_token not in ('', 'Error'):
            self.dropbox = Dropbox(access_token)

    def get_access_token(self):
        """Reads the developer's access token from the Dropbox Json file.

        This access token should only be used for development purposes.

        Returns:
            The developer's access token, '' when the key is absent, or
            'Error' when the JSON file is missing.
        """
        access_token = ''
        developer_access_token_filename = 'data/DropboxData.json'
        try:
            with open(developer_access_token_filename) as dropbox_data:
                dropbox_data_dict = json.load(dropbox_data)
                access_token = dropbox_data_dict['Access Token']
        except FileNotFoundError:
            access_token = 'Error'
        return access_token

    def get_user_account_info(self):
        """Returns a user's account information in a dictionary format."""
        return self.dropbox.users_get_current_account()

    def _files_upload_wrapper(self, in_file_pointer, dropbox_file_path):
        """Upload an open file object to *dropbox_file_path*.

        Returns None on success or a human-readable error message.
        """
        # We use WriteMode=overwrite to make sure that the settings in the
        # file are changed on upload.
        return_message = None
        # Fix: concatenating the file object itself raised TypeError;
        # report the underlying file name instead.
        print("Uploading " + getattr(in_file_pointer, 'name', '<file>') +
              " to Dropbox as " + dropbox_file_path + "...")
        try:
            self.dropbox.files_upload(in_file_pointer.read(),
                                      dropbox_file_path,
                                      mode=WriteMode('overwrite'))
        except ApiError as err:
            # This checks for the specific error where a user doesn't have
            # enough Dropbox space quota to upload this file.
            if (err.error.is_path()
                    and err.error.get_path().reason.is_insufficient_space()):
                return_message = 'Cannot back up; insufficient space.'
            elif err.user_message_text:
                return_message = '{}'.format(err.user_message_text)
            else:
                return_message = '{}'.format(err)
        return return_message

    def save_to_dropbox(self, localFilePath, dropbox_file_path='/my-file-backup.txt'):
        """Upload a local file; returns None on success or an error message."""
        return_message = None
        try:
            with open(localFilePath, 'rb') as read_file:
                return_message = self._files_upload_wrapper(
                    read_file, dropbox_file_path)
        except FileNotFoundError:
            return_message = 'Local file not found'
        return return_message
from dropbox import Dropbox, files
from dotenv import load_dotenv
from glob import glob
from datetime import datetime
from os import path, environ, remove

load_dotenv()

if __name__ == '__main__':
    # Fix: read the token here rather than at import time, so importing
    # this module no longer raises KeyError when DROPBOX_TOKEN is unset.
    token = environ['DROPBOX_TOKEN']
    dbx = Dropbox(token)
    # Validates the token up front; raises AuthError if it is invalid.
    dbx.users_get_current_account()

    now = datetime.now()
    csv_list = glob('csv/*.csv')
    for file_path in csv_list:
        with open(file_path, 'rb') as file:
            dbx.files_upload(file.read(),
                             f'/{path.basename(file_path)}',
                             mode=files.WriteMode.overwrite)
        # Delete local copies that are more than a day old once uploaded.
        delta = now - datetime.fromtimestamp(path.getmtime(file_path))
        if delta.days > 0:
            remove(file_path)
            print(f'uploaded and remove: {file_path}')
        else:
            print(f'uploaded: {file_path}')
def main() -> None:
    """Build the daily press-brief PDF and store it locally and/or on Dropbox.

    Configuration comes from environment variables:
      LIMIT_PER_RSS         max news items per feed (default 4, capped at 50)
      URL2QR                render article URLs as QR codes
      BRIEF_OUTPUT          local output directory (optional)
      DROPBOX_ACCESS_TOKEN  Dropbox upload token (optional)
    At least one of BRIEF_OUTPUT / DROPBOX_ACCESS_TOKEN must be set.
    """
    logger.info("Loading parameters ...")
    if not any(storage in os.environ
               for storage in ["BRIEF_OUTPUT", "DROPBOX_ACCESS_TOKEN"]):
        raise PressBriefError("No storage provided!")

    # bot parameters
    limit_per_rss = os.getenv("LIMIT_PER_RSS", 4)
    url2qrcode = str2bool(os.getenv("URL2QR", "False"))

    # storage parameters
    brief_output = os.getenv("BRIEF_OUTPUT", None)
    dropbox_access_token = os.getenv("DROPBOX_ACCESS_TOKEN", None)

    try:
        limit_per_rss = int(limit_per_rss)
        if limit_per_rss > 50:
            logger.warning(
                "LIMIT_PER_RSS is greater than 50! Reducing to maximum (50) ..."
            )
            limit_per_rss = 50
    except ValueError:
        # Fix: the fallback previously set 10 and claimed "default (10)"
        # while the actual default above is 4; keep them consistent.
        logger.warning(
            "LIMIT_PER_RSS must be an integer! Setting to default (4) ...")
        limit_per_rss = 4

    if brief_output is not None:
        try:
            brief_output = Path(brief_output)
            if not brief_output.is_dir():
                brief_output.mkdir(parents=True)
            if not os.access(brief_output, os.W_OK):
                raise PermissionError(
                    f"No write permissions on `{brief_output}`!")
        except (FileExistsError, PermissionError):
            logger.error(f"The path `{brief_output}` is broken!")
            raise

    if dropbox_access_token is not None:
        try:
            dbx = Dropbox(dropbox_access_token)
            # Validates the token up front rather than at upload time.
            dbx.users_get_current_account()
        except (AuthError, BadInputError):
            logger.error("`DROPBOX_ACCESS_TOKEN` is invalid!")
            raise
    logger.info("Parameters loaded")

    logger.info("Extracting news ...")
    config_path = Path(os.getcwd()) / "config.yaml"
    if not config_path.is_file():
        raise PressBriefError("`config.yaml` file not found!")
    try:
        with open(config_path) as f:
            config = yaml.full_load(f.read())
        newspapers = map(
            lambda newspaper: Newspaper(
                config["newspapers"][newspaper]["name"],
                config["newspapers"][newspaper]["rss"],
                limit_per_rss,
            ),
            config["newspapers"],
        )
    except KeyError:
        logger.error("Corrupted `config.yaml` file!")
        raise

    logger.info("Exporting brief ...")
    exporter = PDFExporter(url2qrcode)
    date_str = datetime.now().strftime("%Y-%m-%d")
    title = f"Daily Press Brief ({date_str})"
    # NOTE(review): local time is labelled "UTC" here — confirm the host
    # clock is UTC or switch to an aware datetime.
    subtitle = (f"{limit_per_rss} news/RSS feeds, "
                f"{datetime.now().strftime('%H:%M:%S UTC')}")
    pdf = exporter.export(newspapers, title, subtitle)
    filename = f"pressbrief-daily-{date_str}.pdf"

    if brief_output is not None:
        logger.info("Saving locally...")
        brief_path = brief_output / filename
        with open(brief_path, "wb") as f:
            f.write(pdf.getbuffer())
        logger.info("Brief saved")

    if dropbox_access_token is not None:
        logger.info("Uploading to Dropbox...")
        brief_path = Path("/") / filename
        try:
            dbx.files_upload(pdf.getvalue(), brief_path.as_posix())
        except ApiError:
            logger.error("Brief exists!")
            raise
        logger.info("Brief uploaded")
class DropBoxStorage(Storage):
    """DropBox Storage class for Django pluggable storage system.

    Also bundles backup helpers: upload/download of database dumps,
    gzip (de)compression and dbbackup/dbrestore wrappers.
    """

    CHUNK_SIZE = 4 * 1024 * 1024

    def __init__(self, oauth2_access_token=None, root_path=None):
        # Fix: the parameters were previously ignored and always
        # overwritten by the settings values; fall back to settings only
        # when the caller does not supply them (backward compatible).
        oauth2_access_token = oauth2_access_token or DROPBOX_OAUTH2_TOKEN
        self.root_path = root_path or DROPBOX_ROOT_PATH
        if oauth2_access_token is None:
            raise ImproperlyConfigured("Você deve configurar um token em DROPBOX_OAUTH2_TOKEN ou em settings.py")
        self.dbx = Dropbox(oauth2_access_token)

    def user_profile(self):
        """Print and cache the authenticated user's account info."""
        self.dt = self.dbx.users_get_current_account()
        print(self.dt)

    def list_dir_and_files_all(self):
        """List root_path as a folder; if that fails, treat it as a file."""
        try:
            self.dt = self.dbx.files_list_folder(self.root_path)
            print('DIRETÓRIOS\n')
            self.list_subdirs(self.dt)
        except Exception:
            self.dt = self.dbx.files_get_metadata(self.root_path)
            print('ARQUIVOS\n')
            if isinstance(self.dt, dropbox.files.FileMetadata):
                print('FUNCIONA')
                file = self.list_files(self.dt)
                return file

    def list_subdirs(self, dt):
        """Print the display path of every entry in *dt*."""
        # Fix: iterate the argument instead of silently using self.dt.
        for entry in dt.entries:
            print(entry.path_display)

    def list_files(self, dt=None):
        """Print and return the display path of root_path's metadata.

        Accepts an optional (unused) metadata argument because
        list_dir_and_files_all() calls it with one; the original
        zero-argument signature made that call raise TypeError.
        """
        self.dt = self.dbx.files_get_metadata(self.root_path)
        file = self.dt.path_display
        print(file)
        return file

    def upload_file(self):
        """Run a fresh DB backup, upload it, return a direct-download URL."""
        #print('Uploading para pasta ', DROPBOX_ROOT_PATH_NEW)
        time = datetime.datetime.now().strftime("%Y%m%d%H%M%S")
        FILEPATH = self.simple_backup()
        with open(FILEPATH, 'rb') as f:
            self.dbx.files_upload(f.read(), DROPBOX_ROOT_PATH_NEW + '/' + time + '.dump')
        link = self.dbx.sharing_create_shared_link_with_settings(DROPBOX_ROOT_PATH_NEW + '/' + time + '.dump')
        # Turn the share link into a direct download (?dl=0 -> ?dl=1).
        dl_url = re.sub(r"\?dl\=0", "?dl=1", link.url)
        return dl_url

    def upload_file_compress(self, filename=''):
        """Upload an already-compressed file, return a direct-download URL."""
        print('Uploading para pasta ', DROPBOX_ROOT_PATH_NEW)
        # (dropped a dead `t.strftime(...)` whose result was discarded)
        new_basename = os.path.basename(filename)
        with open(filename, 'rb') as f:
            self.dbx.files_upload(f.read(), DROPBOX_ROOT_PATH_NEW + '/' + new_basename)
        link = self.dbx.sharing_create_shared_link_with_settings(
            DROPBOX_ROOT_PATH_NEW + '/' + new_basename)
        dl_url = re.sub(r"\?dl\=0", "?dl=1", link.url)
        return dl_url

    def download_file(self, file=''):
        """Download *file* into ROOT_DIR/data/backup, uncompressing if needed."""
        self.file = (DROPBOX_ROOT_PATH + '/' + file)
        file_name = self.file.replace('/sistemaweb/backup/', '')
        print('\nDownloading... /data/backup/' + file_name)
        try:
            metadata, res = self.dbx.files_download(self.file)
        except Exception:
            # Fall back to the caller-supplied path as-is.
            metadata, res = self.dbx.files_download(file)
        final_path = ROOT_DIR + '/data/backup/' + file_name
        with open(final_path, "wb") as f:
            f.write(res.content)
        if '.zip' in final_path or '.gz' in final_path:
            print('Arquivo compactado...efetuando descompressão de dados.')
            self.uncompress_file(final_path)
            new_basename = os.path.basename(final_path).replace('.gz', '')
            return new_basename
        return final_path

    def list_dirs_root_path(self):
        """Download the newest backup in root_path; return its local path."""
        self.dt = self.dbx.files_list_folder(self.root_path)
        #print('ARQUIVOS ENCONTRADOS SERÃO LISTADOS ABAIXO:\n')
        dir = self.download_file(self.dt.entries[-1].path_display)
        return dir

    def list_files_all(self):
        """Return a list of dicts describing every backup in root_path."""
        self.dt = self.dbx.files_list_folder(self.root_path)
        self.data = []
        for entry in self.dt.entries:
            data = {}
            # Fix: the original executed a bare `data['backup_link']`,
            # which raised KeyError on the first iteration; presumably the
            # entry path was meant to be stored — TODO confirm against the
            # consumer of this list.
            data['backup_link'] = entry.path_display
            data['client_modified'] = entry.client_modified
            data['size'] = str(entry.size) + " bytes"
            # Shift the timestamp by -2h for display (server TZ offset?
            # — TODO confirm).
            hora = datetime.datetime.strptime(str(entry.client_modified),
                                              '%Y-%m-%d %H:%M:%S')
            now = hora - datetime.timedelta(hours=2)
            print(entry.path_display, now, str(entry.size) + ' bytes')
            self.data.append(data)
        return self.data

    def simple_backup(self):
        """Run `manage.py dbbackup` and return the generated dump's path."""
        g = dbbackup.get_connector()
        execute_from_command_line(["manage.py", "dbbackup", "-v", "1"])
        filename = (ROOT_DIR + '/data/backup/' + g.generate_filename())
        return filename

    def compress_file(self, filename='', n=''):
        """Compress *filename*; with n == '1' also upload the result."""
        if n == '':
            self.compress_all(filename)
        elif n == '1':
            file = self.compress_all(filename)
            action = self.upload_file_compress(file)
            return action

    def compress_all(self, filename):
        """Gzip *filename* into ROOT_DIR/data/backup; return the .gz path."""
        if 'C:' in filename:
            # Absolute Windows path: read in place, write under backup dir.
            src = filename
            file = (ROOT_DIR + '/data/backup/' + os.path.basename(filename) + '.gz')
        else:
            src = (ROOT_DIR + '/data/backup/' + filename)
            file = (ROOT_DIR + '/data/backup/' + filename + '.gz')
        with open(src, 'rb') as f:
            data = f.read()
        with gzip.open(file, 'wb') as f:
            f.write(data)
        #print('Arquivo compactado com sucesso!!!')
        return file

    def uncompress_file(self, filename):
        """Gunzip *filename* into ROOT_DIR/data/backup."""
        src = filename if 'C:' in filename else (ROOT_DIR + '/data/backup/' + filename)
        dest = (ROOT_DIR + '/data/backup/' + os.path.basename(filename).replace('.gz', ''))
        with gzip.open(src, 'rb') as f:
            data = f.read()
        with open(dest, 'wb') as f:
            f.write(data)
        print('Arquivo descomprimido com sucesso!!!')

    def restore_db(self, filepath=''):
        """Run `manage.py dbrestore` against *filepath* without prompting."""
        print("VEJA O FILEPATH: ", filepath)
        execute_from_command_line(["manage.py", "dbrestore", "-v", "1",
                                   "--noinput", "-i", filepath])

    def restore(self):
        """Restore the database from the newest backup stored on Dropbox."""
        file_name = DropBoxStorage().list_dirs_root_path()
        new_basename = file_name
        if '/' in file_name:
            basename = shutil.copy(file_name, ROOT_DIR + '/data/backup/')
            new_basename = os.path.basename(basename)
        DropBoxStorage().restore_db(new_basename)
        from django.core.management import call_command
        import django
        django.setup()
        call_command('dbrestore', '-v', '1', "--noinput", "-i", new_basename)
        #execute_from_command_line(["manage.py", "dbrestore", "-v", "1", "--noinput", "-i", new_basename])
class DropboxStorage(Storage):
    """
    A storage class providing access to resources in a Dropbox Public folder.
    """

    def __init__(self, location='/Public'):
        self.client = Dropbox(ACCESS_TOKEN)
        self.account_info = self.client.users_get_current_account()
        self.location = location
        self.base_url = 'https://dl.dropboxusercontent.com/'

    def _get_abs_path(self, name):
        # Resolve *name* relative to the storage root folder.
        return os.path.realpath(os.path.join(self.location, name))

    def _open(self, name, mode='rb'):
        name = self._get_abs_path(name)
        remote_file = DropboxFile(name, self, mode=mode)
        return remote_file

    def _save(self, name, content):
        name = self._get_abs_path(name)
        directory = os.path.dirname(name)
        if not self.exists(directory) and directory:
            self.client.files_create_folder(directory)
        # response = self.client.files_get_metadata(directory)
        # if not response['is_dir']:
        #     raise IOError("%s exists and is not a directory." % directory)
        abs_name = os.path.realpath(os.path.join(self.location, name))
        # (dropped the unused `foo =` binding of the upload result)
        self.client.files_upload(content.read(), abs_name)
        return name

    def delete(self, name):
        name = self._get_abs_path(name)
        self.client.files_delete(name)

    def exists(self, name):
        name = self._get_abs_path(name)
        try:
            self.client.files_get_metadata(name)
        except ApiError as e:
            if e.error.is_path() and e.error.get_path().is_not_found():
                # not found
                return False
            raise e
        return True

    def listdir(self, path):
        path = self._get_abs_path(path)
        response = self.client.files_list_folder(path)
        directories = []
        files = []
        for entry in response.entries:
            # isinstance instead of type() == — also matches subclasses.
            if isinstance(entry, FolderMetadata):
                directories.append(os.path.basename(entry.path_display))
            elif isinstance(entry, FileMetadata):
                files.append(os.path.basename(entry.path_display))
        return directories, files

    def size(self, name):
        cache_key = 'django-dropbox-size:{}'.format(filepath_to_uri(name))
        size = cache.get(cache_key)
        if size is None:
            # Fix: `if not size:` re-fetched zero-byte files on every call;
            # only a cache miss (None) should trigger a refetch.
            size = self.client.files_get_metadata(name).size
            cache.set(cache_key, size, CACHE_TIMEOUT)
        return size

    def url(self, name):
        cache_key = 'django-dropbox-size:{}'.format(filepath_to_uri(name))
        url = cache.get(cache_key)
        if url is None:
            url = self.client.files_get_temporary_link(name).link
            cache.set(cache_key, url, SHARE_LINK_CACHE_TIMEOUT)
        return url

    def get_available_name(self, name):
        """
        Returns a filename that's free on the target storage system, and
        available for new content to be written to.
        """
        name = self._get_abs_path(name)
        dir_name, file_name = os.path.split(name)
        file_root, file_ext = os.path.splitext(file_name)
        # If the filename already exists, add an underscore and a number
        # (before the file extension, if one exists) to the filename until
        # the generated filename doesn't exist.
        count = itertools.count(1)
        while self.exists(name):
            # Fix: count.next() is Python 2 only; use next(count).
            name = os.path.join(
                dir_name, "%s_%s%s" % (file_root, next(count), file_ext))
        return name
class rpiImageDbxClass(rpiBaseClass):
    """
    Implements the rpiImageDb class to manage images in a remote
    directory (Dropbox).
    """

    def __init__(self, name, rpi_apscheduler, rpi_events, rpi_config, cam_rpififo=None):

        ### Get the Dbx error event
        #self._eventDbErr = rpi_events.eventErrList["DBXJob"]

        ### Get the custom config parameters
        self._config = rpi_config

        ### Get FIFO buffer for images from the camera (deque)
        self._imageFIFO = cam_rpififo

        ### The FIFO buffer for the uploaded images (deque)
        self.imageUpldFIFO = rpififo.rpiFIFOClass([], 576)
        self.imageUpldFIFO.crtSubDir = ''

        ### Init base class
        super().__init__(name, rpi_apscheduler, rpi_events)

    def __repr__(self):
        return "<%s (name=%s, rpi_apscheduler=%s, rpi_events=dict(), rpi_config=%s, dbuff_rpififo=%s)>" % (self.__class__.__name__, self.name, self._sched, self._config, self._imageFIFO)

    def __str__(self):
        msg = super().__str__()
        return "%s::: dbinfo: %s, config: %s\nimageUpldFIFO: %s\n%s" % \
            (self.name, self.dbinfo, self._config, self.imageUpldFIFO, msg)

    def __del__(self):
        ### Clean base class
        super().__del__()

    #
    # Main interface methods
    #

    def jobRun(self):
        """Scheduled job: push new camera images to the remote folder."""
        try:
            # Lock the buffer
            self._imageFIFO.acquireSemaphore()

            # Get the current images in the FIFO
            # Refresh the last remote image when available
            if len(self._imageFIFO):

                # Update remote cam image with the current (last) image
                if not (self._imageFIFO[-1] == self.crt_image_snap):
                    self._putImage(self._imageFIFO[-1], self._config['image_snap'], True)
                    self.crt_image_snap = self._imageFIFO[-1]
                    self.numImgUpdDb += 1
                    logging.info("Updated remote %s with %s" % (self._config['image_snap'], self._imageFIFO[-1]))

                # Lock the upload buffer
                self.imageUpldFIFO.acquireSemaphore()

                # Check if a new upload sub-folder has to be used
                if not (self.imageUpldFIFO.crtSubDir == self._imageFIFO.crtSubDir):
                    self.imageUpldFIFO.crtSubDir = self._imageFIFO.crtSubDir
                    self.upldir = os.path.normpath(os.path.join(self._config['image_dir'], self.imageUpldFIFO.crtSubDir))
                    self._mkdirImage(self.upldir)

                # Upload only images in the FIFO which have not been uploaded yet
                for img in self._imageFIFO:
                    if img not in self.imageUpldFIFO:
                        self._putImage(img, os.path.join(self.upldir, os.path.basename(img)))
                        logging.info("Uploaded %s" % img)

                # Release the upload buffer
                self.imageUpldFIFO.releaseSemaphore()

                # Update status
                self.statusUpdate = (self.name, self.numImgUpdDb)

            else:
                # Update status
                self.statusUpdate = (self.name, ERRNONE)
                logging.info('Nothing to upload')

        # Handle exceptions, mostly HTTP/SSL related!
        except exceptions.Timeout as e:
            # Catching this error will catch both ReadTimeout and ConnectTimeout.
            raise rpiBaseClassError("%s::: jobRun(): Connect/ReadTimeoutError:\n%s" % (self.name, str(e)), ERRLEV2)

        except exceptions.ConnectionError as e:
            # A Connection error occurred.
            raise rpiBaseClassError("%s::: jobRun(): ConnectionError:\n%s" % (self.name, str(e)), ERRLEV2)

        except exceptions.HTTPError as e:
            # An HTTP error occurred.
            raise rpiBaseClassError("%s::: jobRun(): HTTPError:\n%s" % (self.name, str(e)), ERRLEV2)

        except exceptions.RequestException as e:
            # There was an ambiguous exception that occurred while handling the request.
            raise rpiBaseClassError("%s::: jobRun(): RequestException:\n%s" % (self.name, str(e)), ERRLEV2)

        # except BadStatusLine as e:
        #     self.eventErr_set('run()')
        #     logging.debug("BadStatusLine:\n%s" % str(e))
        #     pass

        except rpiBaseClassError as e:
            if e.errval == ERRCRIT:
                self.endDayOAM()
            raise rpiBaseClassError("%s::: jobRun(): %s" % (self.name, e.errmsg), e.errval)

        except RuntimeError as e:
            self.endDayOAM()
            raise rpiBaseClassError("%s::: jobRun(): RuntimeError:\n%s" % (self.name, str(e)), ERRCRIT)

        except Exception:
            # Last-resort catch-all (was a bare `except:`, which also
            # swallowed KeyboardInterrupt/SystemExit).
            self.endDayOAM()
            raise rpiBaseClassError("%s::: jobRun(): Unhandled Exception:\n%s" % (self.name, str(sys.exc_info())), ERRCRIT)

        finally:
            # Release the buffer
            self._imageFIFO.releaseSemaphore()

    def initClass(self):
        """ (re)Initialize the class. """

        #self.imageDbHash = None
        self._imageDbCursor = None
        self.imageDbList = []
        self.numImgUpdDb = 0

        self.crt_image_snap = None
        self.imgid = self._imageFIFO.camID + '.jpg'
        self.upldir = os.path.normpath(os.path.join(self._config['image_dir'], self.imageUpldFIFO.crtSubDir))
        self.logfile = './upldlog.json'

        ### When there are already images listed in the upload log file, then
        # make sure we don't upload them to the remote folder again
        # Else, create the file with an empty list; to be updated in endDayOAM()
        try:
            self.imageUpldFIFO.acquireSemaphore()
            self.imageUpldFIFO.clear()

            if os.path.isfile(self.logfile):
                with open(self.logfile, 'r') as logf:
                    upldimg = json.load(logf)
                for img in upldimg:
                    self.imageUpldFIFO.append(img)
                del upldimg
                logging.info("%s::: Local log file %s found and loaded." % (self.name, self.logfile))
            else:
                with open(self.logfile, 'w') as logf:
                    json.dump([], logf)
                logging.info("%s::: Local log file %s initialized." % (self.name, self.logfile))

        except IOError:
            raise rpiBaseClassError("%s::: initClass(): Local log file %s was not found or could not be created." % (self.name, self.logfile), ERRCRIT)

        finally:
            # Release the upload buffer
            self.imageUpldFIFO.releaseSemaphore()

        ### Init Dropbox API client
        self._token_file = self._config['token_file']
        self._dbx = None
        self.dbinfo = None
        try:
            with open(self._token_file, 'r') as token:
                self._dbx = Dropbox(token.read())

            info = self._dbx.users_get_current_account()
            # info._all_field_names_ =
            # {'account_id', 'is_paired', 'locale', 'email', 'name', 'team', 'country', 'account_type', 'referral_link'}
            self.dbinfo = {'email': info.email, 'referral_link': info.referral_link}

            logging.info("%s::: Loaded access token from ''%s''" % (self.name, self._token_file))

            ### Create remote root folder (relative to app root) if it does not exist yet
            self._mkdirImage(os.path.normpath(self._config['image_dir']))

        except rpiBaseClassError as e:
            if e.errval == ERRCRIT:
                self.endDayOAM()
            raise rpiBaseClassError("initClass(): %s" % e.errmsg, e.errval)

        except IOError:
            self.endDayOAM()
            # Fix: the original format string had one %s but two arguments,
            # which raised TypeError while trying to report this error.
            raise rpiBaseClassError("%s::: initClass(): Token file ''%s'' could not be read." % (self.name, self._token_file), ERRCRIT)

        except AuthError as e:
            self.endDayOAM()
            raise rpiBaseClassError("initClass(): AuthError:\n%s" % e.error, ERRCRIT)

        except DropboxException as e:
            self.endDayOAM()
            raise rpiBaseClassError("initClass(): DropboxException:\n%s" % str(e), ERRCRIT)

        except InternalServerError as e:
            self.endDayOAM()
            raise rpiBaseClassError("initClass(): InternalServerError:\n%s" % str(e.status_code), ERRCRIT)

    def endDayOAM(self):
        """ End-of-Day Operation and Maintenance sequence. """

        self._lsImage(self.upldir)
        logging.info("%s::: %d images in the remote folder %s" % (self.name, len(self.imageDbList), self.upldir))

        # Lock the upload buffer
        self.imageUpldFIFO.acquireSemaphore()

        try:
            upldimg = []
            for img in self.imageUpldFIFO:
                upldimg.append(img)

            with open(self.logfile, 'w') as logf:
                json.dump(upldimg, logf)

            del upldimg

            logging.info("%s::: Local log file %s updated." % (self.name, self.logfile))

        except IOError:
            raise rpiBaseClassError("endDayOAM(): Local log file %s was not found." % self.logfile, ERRCRIT)

        finally:
            # Release the upload buffer
            self.imageUpldFIFO.releaseSemaphore()

        # def endOAM(self):
        #     """
        #     End OAM procedure.
        #     """

        # NOTE(review): this registers a *new* atexit handler on every call
        # to endDayOAM(), and the handler itself calls endDayOAM() again at
        # interpreter exit — confirm this accumulation is intentional.
        @atexit.register
        def atexitend():
            self.endDayOAM()

    def _lsImage(self, from_path):
        """
        List the image/video files in the remote directory.
        Stores the found file names in self.imageDbList.
        """
        try:
            if self._imageDbCursor is None:
                self.ls_ref = self._dbx.files_list_folder('/' + os.path.normpath(from_path), recursive=False, include_media_info=True)
            else:
                new_ls = self._dbx.files_list_folder_continue(self._imageDbCursor)
                if new_ls.entries == []:
                    logging.debug("%s::: _lsImage():: No changes on the server." % self.name)
                else:
                    self.ls_ref = new_ls

            # Select only images and only the ones for the current imgid (camid)
            foundImg = False
            for f in self.ls_ref.entries:
                if 'media_info' in f._all_field_names_ and \
                        f.media_info is not None:
                    if self.imgid in f.path_lower:
                        img = '.%s' % f.path_lower
                        foundImg = True
                        if img not in self.imageDbList:
                            self.imageDbList.append(img)

            if not foundImg:
                self.imageDbList = []

            ### Store the hash of the folder
            self._imageDbCursor = self.ls_ref.cursor

            if len(self.imageDbList) > 0:
                logging.debug("%s::: _lsImage():: imageDbList[0..%d]: %s .. %s" % (self.name, len(self.imageDbList) - 1, self.imageDbList[0], self.imageDbList[-1]))
            else:
                logging.debug("%s::: _lsImage():: imageDbList[]: empty" % self.name)

        except ApiError as e:
            raise rpiBaseClassError("_lsImage(): %s" % e.error, ERRLEV2)

    def _putImage(self, from_path, to_path, overwrite=False):
        """
        Copy local file to remote file.
        Stores the uploaded files names in self.imageUpldFIFO.

        Examples:
        _putImage('./path/test.jpg', '/path/dropbox-upload-test.jpg')
        """
        try:
            mode = (WriteMode.overwrite if overwrite else WriteMode.add)

            with open(from_path, "rb") as from_file:
                # Fix: files_upload expects the file *contents* (bytes),
                # not the file object itself.
                self._dbx.files_upload(from_file.read(), '/' + os.path.normpath(to_path), mode)

            if not overwrite:
                self.imageUpldFIFO.append(from_path)

            logging.debug("%s::: _putImage(): Uploaded file from %s to remote %s" % (self.name, from_path, to_path))

        except IOError:
            raise rpiBaseClassError("_putImage(): Local img file %s could not be opened." % from_path, ERRCRIT)

        except ApiError as e:
            raise rpiBaseClassError("_putImage(): %s" % e.error, ERRLEV2)

    def _mkdirImage(self, path):
        """
        Create a new remote directory.

        Examples:
        _mkdirImage('/dropbox_dir_test')
        """
        try:
            self._dbx.files_create_folder('/' + os.path.normpath(path))
            logging.debug("%s::: Remote output folder /%s created." % (self.name, path))

        except ApiError as e:
            noerr = False
            # dropbox.files.CreateFolderError
            if e.error.is_path():
                # dropbox.files.WriteError
                we = e.error.get_path()
                if we.is_conflict():
                    # dropbox.files.WriteConflictError
                    wce = we.get_conflict()
                    # union tag is 'folder'
                    if wce.is_folder():
                        logging.info("%s::: Remote output folder /%s already exist!" % (self.name, path))
                        noerr = True

            if not noerr:
                raise rpiBaseClassError("_mkdirImage(): Remote output folder /%s was not created! %s" % (path, e.error), ERRCRIT)

    def _mvImage(self, from_path, to_path):
        """
        Move/rename a remote file or directory.

        Examples:
        _mvImage('./path1/dropbox-move-test.jpg', '/path2/dropbox-move-test.jpg')
        """
        try:
            self._dbx.files_move('/' + os.path.normpath(from_path),
                                 '/' + os.path.normpath(to_path))
            logging.debug("%s::: _mvImage(): Moved file from %s to %s" % (self.name, from_path, to_path))

        except ApiError as e:
            raise rpiBaseClassError("_mvImage(): Image %s could not be moved to %s! %s" % (from_path, to_path, e.error), ERRLEV2)
class DPBXBackend(duplicity.backend.Backend):
    """Connect to remote store using Dr*pB*x service

    Access token is taken from the DPBX_ACCESS_TOKEN environment variable;
    when absent, an interactive OAuth2 flow (DPBX_APP_KEY/DPBX_APP_SECRET)
    is started and the user is asked to export the resulting token.
    """

    def __init__(self, parsed_url):
        duplicity.backend.Backend.__init__(self, parsed_url)

        self.api_account = None
        self.api_client = None
        self.auth_flow = None

        self.login()

    def load_access_token(self):
        """Return the access token from the environment, or None."""
        return os.environ.get('DPBX_ACCESS_TOKEN', None)

    def save_access_token(self, access_token):
        # Deliberately raises: the backend never persists the token itself,
        # the user must export it as an environment variable.
        raise BackendException('dpbx: Please set DPBX_ACCESS_TOKEN=\"%s\" environment variable' % access_token)

    def obtain_access_token(self):
        """Run the interactive OAuth2 no-redirect flow to get a new token.

        Requires a TTY; always ends in save_access_token(), which raises.
        """
        log.Info("dpbx: trying to obtain access token")
        for env_var in ['DPBX_APP_KEY', 'DPBX_APP_SECRET']:
            if env_var not in os.environ:
                raise BackendException('dpbx: %s environment variable not set' % env_var)
        app_key = os.environ['DPBX_APP_KEY']
        app_secret = os.environ['DPBX_APP_SECRET']

        if not sys.stdout.isatty() or not sys.stdin.isatty():
            log.FatalError('dpbx error: cannot interact, but need human attention', log.ErrorCode.backend_command_error)

        auth_flow = DropboxOAuth2FlowNoRedirect(app_key, app_secret)
        log.Debug('dpbx,auth_flow.start()')
        authorize_url = auth_flow.start()
        print
        print '-' * 72
        print "1. Go to: " + authorize_url
        print "2. Click \"Allow\" (you might have to log in first)."
        print "3. Copy the authorization code."
        print '-' * 72
        auth_code = raw_input("Enter the authorization code here: ").strip()

        try:
            log.Debug('dpbx,auth_flow.finish(%s)' % auth_code)
            access_token, _ = auth_flow.finish(auth_code)
        except Exception as e:
            raise BackendException('dpbx: Unable to obtain access token: %s' % e)
        log.Info("dpbx: Authentication successfull")

        self.save_access_token(access_token)

    def login(self):
        """Create the Dropbox client and verify the token by fetching the account."""
        if self.load_access_token() is None:
            self.obtain_access_token()

        self.api_client = Dropbox(self.load_access_token())
        self.api_account = None
        try:
            log.Debug('dpbx,users_get_current_account([token])')
            self.api_account = self.api_client.users_get_current_account()
            log.Debug("dpbx,%s" % self.api_account)
        except (BadInputError, AuthError) as e:
            log.Debug('dpbx,exception: %s' % e)
            log.Info("dpbx: Authentication failed. Trying to obtain new access token")
            self.obtain_access_token()
            # We're assuming obtain_access_token will throw exception. So this line should not be reached
            raise BackendException("dpbx: Please update DPBX_ACCESS_TOKEN and try again")

        log.Info("dpbx: Successfully authenticated as %s" % self.api_account.name.display_name)

    def _error_code(self, operation, e):
        """Map Dropbox ApiError 'not found' variants to duplicity error codes."""
        if isinstance(e, ApiError):
            err = e.error

            if isinstance(err, GetMetadataError) and err.is_path():
                if err.get_path().is_not_found():
                    return log.ErrorCode.backend_not_found
            elif isinstance(err, DeleteError) and err.is_path_lookup():
                lookup = e.error.get_path_lookup()
                if lookup.is_not_found():
                    return log.ErrorCode.backend_not_found

    @command()
    def _put(self, source_path, remote_filename):
        """Upload source_path to remote_filename using a chunked upload session."""
        remote_dir = urllib.unquote(self.parsed_url.path.lstrip('/'))
        remote_path = '/' + os.path.join(remote_dir, remote_filename).rstrip()

        file_size = os.path.getsize(source_path.name)
        f = source_path.open('rb')
        try:
            progress.report_transfer(0, file_size)
            buf = f.read(DPBX_UPLOAD_CHUNK_SIZE)
            log.Debug('dpbx,files_upload_session_start([%d bytes]), total: %d' % (len(buf), file_size))
            upload_sid = self.api_client.files_upload_session_start(buf)
            log.Debug('dpbx,files_upload_session_start(): %s' % upload_sid)
            upload_cursor = UploadSessionCursor(upload_sid.session_id, f.tell())
            commit_info = CommitInfo(remote_path, mode=WriteMode.overwrite,
                                     autorename=False, client_modified=None,
                                     mute=True)
            res_metadata = None
            progress.report_transfer(f.tell(), file_size)

            requested_offset = None
            current_chunk_size = DPBX_UPLOAD_CHUNK_SIZE
            retry_number = globals.num_retries

            # We're doing our own error handling and retrying logic because
            # we can benefit from Dpbx chunked upload and retry only failed chunk
            while (f.tell() < file_size) or not res_metadata:
                try:
                    if requested_offset is not None:
                        upload_cursor.offset = requested_offset

                    if f.tell() != upload_cursor.offset:
                        f.seek(upload_cursor.offset)
                    buf = f.read(current_chunk_size)

                    # reset temporary status variables
                    requested_offset = None
                    current_chunk_size = DPBX_UPLOAD_CHUNK_SIZE
                    retry_number = globals.num_retries

                    if len(buf) != 0:
                        log.Debug('dpbx,files_upload_sesssion_append([%d bytes], offset=%d)' % (len(buf), upload_cursor.offset))
                        self.api_client.files_upload_session_append(buf, upload_cursor.session_id, upload_cursor.offset)
                    else:
                        # Empty read means EOF: commit the session.
                        log.Debug('dpbx,files_upload_sesssion_finish([%d bytes], offset=%d)' % (len(buf), upload_cursor.offset))
                        res_metadata = self.api_client.files_upload_session_finish(buf, upload_cursor, commit_info)

                    upload_cursor.offset = f.tell()
                    log.Debug('progress: %d of %d' % (upload_cursor.offset, file_size))
                    progress.report_transfer(upload_cursor.offset, file_size)
                except ApiError as e:
                    error = e.error
                    if isinstance(error, UploadSessionLookupError) and error.is_incorrect_offset():
                        # Server reports that we should send another chunk. Most likely this is caused by
                        # network error during previous upload attempt. In such case we'll get expected offset
                        # from server and it's enough to just seek() and retry again
                        new_offset = error.get_incorrect_offset().correct_offset
                        log.Debug('dpbx,files_upload_session_append: incorrect offset: %d (expected: %s)' % (upload_cursor.offset, new_offset))
                        if requested_offset is not None:
                            # chunk failed even after seek attempt. Something strange and no safe way to recover
                            raise BackendException("dpbx: unable to chunk upload")
                        else:
                            # will seek and retry
                            requested_offset = new_offset
                        continue
                    raise
                except ConnectionError as e:
                    log.Debug('dpbx,files_upload_session_append: %s' % e)

                    retry_number -= 1

                    if retry_number == 0:
                        raise

                    # We don't know for sure, was partial upload successfull or not. So it's better to retry smaller amount to avoid extra reupload
                    log.Info('dpbx: sleeping a bit before chunk retry')
                    time.sleep(30)
                    current_chunk_size = DPBX_UPLOAD_CHUNK_SIZE / 5
                    requested_offset = None
                    continue

            if f.tell() != file_size:
                raise BackendException('dpbx: something wrong')

            log.Debug('dpbx,files_upload_sesssion_finish(): %s' % res_metadata)
            progress.report_transfer(f.tell(), file_size)

            # A few sanity checks
            if res_metadata.path_display != remote_path:
                raise BackendException('dpbx: result path mismatch: %s (expected: %s)' % (res_metadata.path_display, remote_path))
            if res_metadata.size != file_size:
                raise BackendException('dpbx: result size mismatch: %s (expected: %s)' % (res_metadata.size, file_size))
        finally:
            f.close()

    @command()
    def _get(self, remote_filename, local_path):
        """Download remote_filename into local_path and verify the byte count."""
        remote_dir = urllib.unquote(self.parsed_url.path.lstrip('/'))
        remote_path = '/' + os.path.join(remote_dir, remote_filename).rstrip()

        log.Debug('dpbx,files_download(%s)' % remote_path)
        res_metadata, http_fd = self.api_client.files_download(remote_path)
        log.Debug('dpbx,files_download(%s): %s, %s' % (remote_path, res_metadata, http_fd))
        file_size = res_metadata.size
        to_fd = None
        progress.report_transfer(0, file_size)
        try:
            to_fd = local_path.open('wb')
            for c in http_fd.iter_content(DPBX_DOWNLOAD_BUF_SIZE):
                to_fd.write(c)
                progress.report_transfer(to_fd.tell(), file_size)
        finally:
            if to_fd:
                to_fd.close()
            http_fd.close()

        # It's different from _query() check because we're not querying metadata again.
        # Since this check is free, it's better to have it here
        local_size = os.path.getsize(local_path.name)
        if local_size != file_size:
            raise BackendException("dpbx: wrong file size: %d (expected: %d)" % (local_size, file_size))

        local_path.setdata()

    @command()
    def _list(self):
        """Return names of all entries in the remote folder, following cursors."""
        # Do a long listing to avoid connection reset
        remote_dir = '/' + urllib.unquote(self.parsed_url.path.lstrip('/')).rstrip()

        log.Debug('dpbx.files_list_folder(%s)' % remote_dir)
        resp = self.api_client.files_list_folder(remote_dir)
        log.Debug('dpbx.list(%s): %s' % (remote_dir, resp))

        res = []
        while True:
            res.extend([entry.name for entry in resp.entries])
            if not resp.has_more:
                break
            resp = self.api_client.files_list_folder_continue(resp.cursor)

        # Warn users of old version dpbx about automatically renamed files
        self.check_renamed_files(res)

        return res

    @command()
    def _delete(self, filename):
        """Delete a single remote file (moved to Dropbox trash, not permanent)."""
        remote_dir = urllib.unquote(self.parsed_url.path.lstrip('/'))
        remote_path = '/' + os.path.join(remote_dir, filename).rstrip()

        log.Debug('dpbx.files_delete(%s)' % remote_path)
        self.api_client.files_delete(remote_path)

        # files_permanently_delete seems to be better for backup purpose
        # but it's only available for Business accounts
        # self.api_client.files_permanently_delete(remote_path)

    @command()
    def _close(self):
        """close backend session? no! just "flush" the data"""
        log.Debug('dpbx.close():')

    @command()
    def _query(self, filename):
        """Return {'size': n} for a remote file via files_get_metadata."""
        remote_dir = urllib.unquote(self.parsed_url.path.lstrip('/'))
        remote_path = '/' + os.path.join(remote_dir, filename).rstrip()

        log.Debug('dpbx.files_get_metadata(%s)' % remote_path)
        info = self.api_client.files_get_metadata(remote_path)
        log.Debug('dpbx.files_get_metadata(%s): %s' % (remote_path, info))
        return {'size': info.size}

    def check_renamed_files(self, file_list):
        """Warn (only) about files auto-renamed by older duplicity versions."""
        bad_list = [x for x in file_list if DPBX_AUTORENAMED_FILE_RE.search(x) is not None]
        if len(bad_list) == 0:
            return
        log.Warn('-' * 72)
        log.Warn('Warning! It looks like there are automatically renamed files on backend')
        log.Warn('They were probably created when using older version of duplicity.')
        log.Warn('')
        log.Warn('Please check your backup consistency. Most likely you will need to choose')
        log.Warn('largest file from duplicity-* (number).gpg and remove brackets from its name.')
        log.Warn('')
        log.Warn('These files are not managed by duplicity at all and will not be')
        log.Warn('removed/rotated automatically.')
        log.Warn('')
        log.Warn('Affected files:')
        for x in bad_list:
            log.Warn('\t%s' % x)
        log.Warn('')
        log.Warn('In any case it\'s better to create full backup.')
        log.Warn('-' * 72)
#         redirect_to("/dropbox-auth-start")
#     except CsrfException as e:
#         http_status(403)
#     except NotApprovedException as e:
#         flash('Not approved?  Why not?')
#         return redirect_to("/home")
#     except ProviderException as e:
#         logger.log("Auth error: %s" % (e,))
#         http_status(403)

# Interactive console OAuth2 flow: ask the user to authorize the app in a
# browser, exchange the pasted code for an access token and verify it by
# fetching the current account.
import sys

from dropbox import DropboxOAuth2FlowNoRedirect
from dropbox import Dropbox

# SECURITY(review): app key/secret are hard-coded in source; they should be
# loaded from the environment and these values rotated.
APP_KEY = 'cbm74gzdx3jn00g'
APP_SECRET = 'chq2mprrc8ldtfg'

auth_flow = DropboxOAuth2FlowNoRedirect(APP_KEY, APP_SECRET)

authorize_url = auth_flow.start()
print("1. Go to: " + authorize_url)
print("2. Click \"Allow\" (you might have to log in first).")
print("3. Copy the authorization code.")
auth_code = input("Enter the authorization code here: ").strip()

try:
    access_token, user_id = auth_flow.finish(auth_code)
except Exception as e:
    print('Error: %s' % (e,))
    # Bug fix: previously execution fell through after a failed token
    # exchange and crashed with NameError on the undefined `access_token`.
    sys.exit(1)

dbx = Dropbox(access_token)
print(dbx.users_get_current_account())
class DropboxStorage(Storage): """ A storage class providing access to resources in a Dropbox Public folder. """ def __init__(self, location='/Public'): self.client = Dropbox(ACCESS_TOKEN) self.account_info = self.client.users_get_current_account() self.location = location self.base_url = 'https://dl.dropboxusercontent.com/' def _get_abs_path(self, name): return os.path.realpath(os.path.join(self.location, name)) def _open(self, name, mode='rb'): name = self._get_abs_path(name) remote_file = DropboxFile(name, self, mode=mode) return remote_file def _save(self, name, content): name = self._get_abs_path(name) directory = os.path.dirname(name) if not self.exists(directory) and directory: self.client.files_create_folder(directory) # response = self.client.files_get_metadata(directory) # if not response['is_dir']: # raise IOError("%s exists and is not a directory." % directory) abs_name = os.path.realpath(os.path.join(self.location, name)) foo = self.client.files_upload(content.read(), abs_name) return name def delete(self, name): name = self._get_abs_path(name) self.client.files_delete(name) def exists(self, name): name = self._get_abs_path(name) try: self.client.files_get_metadata(name) except ApiError as e: if e.error.is_path() and e.error.get_path().is_not_found(): # not found return False raise e return True def listdir(self, path): path = self._get_abs_path(path) response = self.client.files_list_folder(path) directories = [] files = [] for entry in response.entries: if type(entry) == FolderMetadata: directories.append(os.path.basename(entry.path_display)) elif type(entry) == FileMetadata: files.append(os.path.basename(entry.path_display)) return directories, files def size(self, name): cache_key = 'django-dropbox-size:{}'.format(filepath_to_uri(name)) size = cache.get(cache_key) if not size: size = self.client.files_get_metadata(name).size cache.set(cache_key, size, CACHE_TIMEOUT) return size def url(self, name): if name.startswith(self.location): name = 
name[len(self.location) + 1:] name = os.path.basename(self.location) + "/" + name if self.base_url is None: raise ValueError("This file is not accessible via a URL.") myurl = urlparse.urljoin(self.base_url, filepath_to_uri(name)) if "static" not in self.location: # Use a dynamic URL for "non-static" files. try: new_name = os.path.dirname(self.location) + "/" + name fp = filepath_to_uri(new_name) cache_key = 'django-dropbox-size:{}'.format(fp) myurl = cache.get(cache_key) if not myurl: try: shared_link = self.client.sharing_create_shared_link(fp) myurl = shared_link.url + '&raw=1' logger.debug("shared link: {0}, myurl: {1}".format(shared_link, myurl)) except Exception,e: logger.exception(e) if myurl is None: temp_link = self.client.files_get_temporary_link(fp) myurl = temp_link.link logger.debug("temp link: {0}, myurl: {1}".format(temp_link, myurl)) cache.set(cache_key, myurl, SHARE_LINK_CACHE_TIMEOUT) except Exception,e: logger.exception(e) return myurl """
class DPBXBackend(duplicity.backend.Backend):
    """Connect to remote store using Dr*pB*x service

    Refactored variant: re-checks authentication (user_authenticated) before
    each operation and splits uploads into a small (single-request) and a
    chunked (upload-session) path.
    """

    def __init__(self, parsed_url):
        duplicity.backend.Backend.__init__(self, parsed_url)

        self.api_account = None
        self.api_client = None
        self.auth_flow = None

        self.login()

    def user_authenticated(self):
        """Return True if the current client can still fetch its account."""
        try:
            account = self.api_client.users_get_current_account()
            log.Debug("User authenticated as ,%s" % account)
            return True
        except:
            log.Debug('User not authenticated')
            return False

    def load_access_token(self):
        """Return the access token from the environment, or None."""
        return os.environ.get('DPBX_ACCESS_TOKEN', None)

    def save_access_token(self, access_token):
        # Deliberately raises: the user must export the token themselves.
        raise BackendException(
            'dpbx: Please set DPBX_ACCESS_TOKEN=\"%s\" environment variable' %
            access_token)

    def obtain_access_token(self):
        """Run the interactive OAuth2 no-redirect flow to get a new token.

        Requires a TTY; always ends in save_access_token(), which raises.
        """
        log.Info("dpbx: trying to obtain access token")
        for env_var in ['DPBX_APP_KEY', 'DPBX_APP_SECRET']:
            if env_var not in os.environ:
                raise BackendException(
                    'dpbx: %s environment variable not set' % env_var)
        app_key = os.environ['DPBX_APP_KEY']
        app_secret = os.environ['DPBX_APP_SECRET']

        if not sys.stdout.isatty() or not sys.stdin.isatty():
            log.FatalError(
                'dpbx error: cannot interact, but need human attention',
                log.ErrorCode.backend_command_error)

        auth_flow = DropboxOAuth2FlowNoRedirect(app_key, app_secret)
        log.Debug('dpbx,auth_flow.start()')
        authorize_url = auth_flow.start()
        print
        print '-' * 72
        print "1. Go to: " + authorize_url
        print "2. Click \"Allow\" (you might have to log in first)."
        print "3. Copy the authorization code."
        print '-' * 72
        auth_code = raw_input("Enter the authorization code here: ").strip()

        try:
            log.Debug('dpbx,auth_flow.finish(%s)' % auth_code)
            authresult = auth_flow.finish(auth_code)
        except Exception as e:
            raise BackendException('dpbx: Unable to obtain access token: %s' %
                                   e)
        log.Info("dpbx: Authentication successfull")

        self.save_access_token(authresult.access_token)

    def login(self):
        """Create the Dropbox client and verify the token by fetching the account."""
        if self.load_access_token() is None:
            self.obtain_access_token()

        self.api_client = Dropbox(self.load_access_token())
        self.api_account = None
        try:
            log.Debug('dpbx,users_get_current_account([token])')
            self.api_account = self.api_client.users_get_current_account()
            log.Debug("dpbx,%s" % self.api_account)

        except (BadInputError, AuthError) as e:
            log.Debug('dpbx,exception: %s' % e)
            log.Info(
                "dpbx: Authentication failed. Trying to obtain new access token"
            )

            self.obtain_access_token()

            # We're assuming obtain_access_token will throw exception.
            # So this line should not be reached
            raise BackendException(
                "dpbx: Please update DPBX_ACCESS_TOKEN and try again")

        log.Info("dpbx: Successfully authenticated as %s" %
                 self.api_account.name.display_name)

    def _error_code(self, operation, e):
        """Map Dropbox ApiError 'not found' variants to duplicity error codes."""
        if isinstance(e, ApiError):
            err = e.error

            if isinstance(err, GetMetadataError) and err.is_path():
                if err.get_path().is_not_found():
                    return log.ErrorCode.backend_not_found
            elif isinstance(err, DeleteError) and err.is_path_lookup():
                lookup = e.error.get_path_lookup()
                if lookup.is_not_found():
                    return log.ErrorCode.backend_not_found

    @command()
    def _put(self, source_path, remote_filename):
        """Upload source_path; choose single-shot vs chunked by file size."""
        remote_dir = urllib.unquote(self.parsed_url.path.lstrip('/'))
        remote_path = '/' + os.path.join(remote_dir, remote_filename).rstrip()

        file_size = os.path.getsize(source_path.name)
        progress.report_transfer(0, file_size)

        if file_size < DPBX_UPLOAD_CHUNK_SIZE:
            # Upload whole file at once to avoid extra server request
            res_metadata = self.put_file_small(source_path, remote_path)
        else:
            res_metadata = self.put_file_chunked(source_path, remote_path)

        # A few sanity checks
        if res_metadata.path_display != remote_path:
            raise BackendException(
                'dpbx: result path mismatch: %s (expected: %s)' %
                (res_metadata.path_display, remote_path))
        if res_metadata.size != file_size:
            raise BackendException(
                'dpbx: result size mismatch: %s (expected: %s)' %
                (res_metadata.size, file_size))

    def put_file_small(self, source_path, remote_path):
        """Upload a file smaller than one chunk with a single files_upload call."""
        if not self.user_authenticated():
            self.login()
        file_size = os.path.getsize(source_path.name)
        f = source_path.open('rb')
        try:
            log.Debug('dpbx,files_upload(%s, [%d bytes])' %
                      (remote_path, file_size))

            res_metadata = self.api_client.files_upload(
                f.read(),
                remote_path,
                mode=WriteMode.overwrite,
                autorename=False,
                client_modified=None,
                mute=True)
            log.Debug('dpbx,files_upload(): %s' % res_metadata)
            progress.report_transfer(file_size, file_size)
            return res_metadata
        finally:
            f.close()

    def put_file_chunked(self, source_path, remote_path):
        """Upload via an upload session, retrying only the failed chunk."""
        if not self.user_authenticated():
            self.login()
        file_size = os.path.getsize(source_path.name)
        f = source_path.open('rb')
        try:
            buf = f.read(DPBX_UPLOAD_CHUNK_SIZE)
            log.Debug(
                'dpbx,files_upload_session_start([%d bytes]), total: %d' %
                (len(buf), file_size))
            upload_sid = self.api_client.files_upload_session_start(buf)
            log.Debug('dpbx,files_upload_session_start(): %s' % upload_sid)
            upload_cursor = UploadSessionCursor(upload_sid.session_id,
                                                f.tell())
            commit_info = CommitInfo(remote_path,
                                     mode=WriteMode.overwrite,
                                     autorename=False,
                                     client_modified=None,
                                     mute=True)
            res_metadata = None
            progress.report_transfer(f.tell(), file_size)

            requested_offset = None
            current_chunk_size = DPBX_UPLOAD_CHUNK_SIZE

            retry_number = globals.num_retries
            is_eof = False

            # We're doing our own error handling and retrying logic because
            # we can benefit from Dpbx chunked upload and retry only failed
            # chunk
            while not is_eof or not res_metadata:
                try:
                    if requested_offset is not None:
                        upload_cursor.offset = requested_offset

                    if f.tell() != upload_cursor.offset:
                        f.seek(upload_cursor.offset)
                    buf = f.read(current_chunk_size)

                    is_eof = f.tell() >= file_size
                    if not is_eof and len(buf) == 0:
                        continue

                    # reset temporary status variables
                    requested_offset = None
                    current_chunk_size = DPBX_UPLOAD_CHUNK_SIZE
                    retry_number = globals.num_retries

                    if not is_eof:
                        assert len(buf) != 0
                        log.Debug(
                            'dpbx,files_upload_sesssion_append([%d bytes], offset=%d)'
                            % (len(buf), upload_cursor.offset))
                        self.api_client.files_upload_session_append(
                            buf, upload_cursor.session_id,
                            upload_cursor.offset)
                    else:
                        log.Debug(
                            'dpbx,files_upload_sesssion_finish([%d bytes], offset=%d)'
                            % (len(buf), upload_cursor.offset))
                        res_metadata = self.api_client.files_upload_session_finish(
                            buf, upload_cursor, commit_info)

                    upload_cursor.offset = f.tell()
                    log.Debug('progress: %d of %d' % (upload_cursor.offset,
                                                      file_size))
                    progress.report_transfer(upload_cursor.offset, file_size)
                except ApiError as e:
                    error = e.error
                    if isinstance(error, UploadSessionLookupError
                                  ) and error.is_incorrect_offset():
                        # Server reports that we should send another chunk.
                        # Most likely this is caused by network error during
                        # previous upload attempt. In such case we'll get
                        # expected offset from server and it's enough to just
                        # seek() and retry again
                        new_offset = error.get_incorrect_offset(
                        ).correct_offset
                        log.Debug(
                            'dpbx,files_upload_session_append: incorrect offset: %d (expected: %s)'
                            % (upload_cursor.offset, new_offset))
                        if requested_offset is not None:
                            # chunk failed even after seek attempt. Something
                            # strange and no safe way to recover
                            raise BackendException(
                                "dpbx: unable to chunk upload")
                        else:
                            # will seek and retry
                            requested_offset = new_offset
                        continue
                    raise
                except ConnectionError as e:
                    log.Debug('dpbx,files_upload_session_append: %s' % e)

                    retry_number -= 1
                    if not self.user_authenticated():
                        self.login()

                    if retry_number == 0:
                        raise

                    # We don't know for sure, was partial upload successful or
                    # not. So it's better to retry smaller amount to avoid extra
                    # reupload
                    log.Info('dpbx: sleeping a bit before chunk retry')
                    time.sleep(30)
                    current_chunk_size = DPBX_UPLOAD_CHUNK_SIZE / 5
                    requested_offset = None
                    continue

            if f.tell() != file_size:
                raise BackendException('dpbx: something wrong')

            log.Debug('dpbx,files_upload_sesssion_finish(): %s' %
                      res_metadata)
            progress.report_transfer(f.tell(), file_size)

            return res_metadata

        finally:
            f.close()

    @command()
    def _get(self, remote_filename, local_path):
        """Download remote_filename into local_path and verify the byte count."""
        if not self.user_authenticated():
            self.login()
        remote_dir = urllib.unquote(self.parsed_url.path.lstrip('/'))
        remote_path = '/' + os.path.join(remote_dir, remote_filename).rstrip()

        log.Debug('dpbx,files_download(%s)' % remote_path)
        res_metadata, http_fd = self.api_client.files_download(remote_path)
        log.Debug('dpbx,files_download(%s): %s, %s' % (remote_path,
                                                       res_metadata, http_fd))
        file_size = res_metadata.size
        to_fd = None
        progress.report_transfer(0, file_size)
        try:
            to_fd = local_path.open('wb')
            for c in http_fd.iter_content(DPBX_DOWNLOAD_BUF_SIZE):
                to_fd.write(c)
                progress.report_transfer(to_fd.tell(), file_size)

        finally:
            if to_fd:
                to_fd.close()
            http_fd.close()

        # It's different from _query() check because we're not querying metadata
        # again. Since this check is free, it's better to have it here
        local_size = os.path.getsize(local_path.name)
        if local_size != file_size:
            raise BackendException("dpbx: wrong file size: %d (expected: %d)" %
                                   (local_size, file_size))

        local_path.setdata()

    @command()
    def _list(self):
        """Return names of all remote entries; a missing folder yields []."""
        # Do a long listing to avoid connection reset
        if not self.user_authenticated():
            self.login()
        remote_dir = '/' + urllib.unquote(
            self.parsed_url.path.lstrip('/')).rstrip()

        log.Debug('dpbx.files_list_folder(%s)' % remote_dir)
        res = []
        try:
            resp = self.api_client.files_list_folder(remote_dir)
            log.Debug('dpbx.list(%s): %s' % (remote_dir, resp))

            while True:
                res.extend([entry.name for entry in resp.entries])
                if not resp.has_more:
                    break
                resp = self.api_client.files_list_folder_continue(resp.cursor)
        except ApiError as e:
            if (isinstance(e.error, ListFolderError) and e.error.is_path()
                    and e.error.get_path().is_not_found()):
                log.Debug('dpbx.list(%s): ignore missing folder (%s)' %
                          (remote_dir, e))
            else:
                raise

        # Warn users of old version dpbx about automatically renamed files
        self.check_renamed_files(res)

        return res

    @command()
    def _delete(self, filename):
        """Delete a single remote file (moved to Dropbox trash, not permanent)."""
        if not self.user_authenticated():
            self.login()
        remote_dir = urllib.unquote(self.parsed_url.path.lstrip('/'))
        remote_path = '/' + os.path.join(remote_dir, filename).rstrip()

        log.Debug('dpbx.files_delete(%s)' % remote_path)
        self.api_client.files_delete(remote_path)

        # files_permanently_delete seems to be better for backup purpose
        # but it's only available for Business accounts
        # self.api_client.files_permanently_delete(remote_path)

    @command()
    def _close(self):
        """close backend session? no! just "flush" the data"""
        log.Debug('dpbx.close():')

    @command()
    def _query(self, filename):
        """Return {'size': n} for a remote file via files_get_metadata."""
        if not self.user_authenticated():
            self.login()
        remote_dir = urllib.unquote(self.parsed_url.path.lstrip('/'))
        remote_path = '/' + os.path.join(remote_dir, filename).rstrip()

        log.Debug('dpbx.files_get_metadata(%s)' % remote_path)
        info = self.api_client.files_get_metadata(remote_path)
        log.Debug('dpbx.files_get_metadata(%s): %s' % (remote_path, info))
        return {'size': info.size}

    def check_renamed_files(self, file_list):
        """Warn (only) about files auto-renamed by older duplicity versions."""
        if not self.user_authenticated():
            self.login()
        bad_list = [
            x for x in file_list
            if DPBX_AUTORENAMED_FILE_RE.search(x) is not None
        ]
        if len(bad_list) == 0:
            return
        log.Warn('-' * 72)
        log.Warn(
            'Warning! It looks like there are automatically renamed files on backend'
        )
        log.Warn(
            'They were probably created when using older version of duplicity.'
        )
        log.Warn('')
        log.Warn(
            'Please check your backup consistency. Most likely you will need to choose'
        )
        log.Warn(
            'largest file from duplicity-* (number).gpg and remove brackets from its name.'
        )
        log.Warn('')
        log.Warn(
            'These files are not managed by duplicity at all and will not be')
        log.Warn('removed/rotated automatically.')
        log.Warn('')
        log.Warn('Affected files:')
        for x in bad_list:
            log.Warn('\t%s' % x)
        log.Warn('')
        log.Warn('In any case it\'s better to create full backup.')
        log.Warn('-' * 72)
def main() -> None:
    """Build and store the periodic tweet brief.

    Reads all configuration from environment variables, extracts top tweets,
    renders them to a PDF and saves it locally (BRIEF_OUTPUT) and/or uploads
    it to Dropbox (DROPBOX_ACCESS_TOKEN). At least one storage must be set.

    Raises:
        TweetBriefError: when a required variable or any storage is missing.
        AuthError / BadInputError: when the Dropbox token is invalid.
    """
    logger.info("Loading parameters...")
    # Required parameters: fail fast before doing any work.
    if "TARGET_USERNAME" not in os.environ:
        raise TweetBriefError("TARGET_USERNAME not found!")
    if "CONSUMER_KEY" not in os.environ:
        raise TweetBriefError("CONSUMER_KEY not found!")
    if "CONSUMER_SECRET" not in os.environ:
        raise TweetBriefError("CONSUMER_SECRET not found!")
    if not any(storage in os.environ for storage in ["BRIEF_OUTPUT", "DROPBOX_ACCESS_TOKEN"]):
        raise TweetBriefError("No storage provided!")

    # Twitter API parameters
    consumer_key = os.getenv("CONSUMER_KEY")
    consumer_secret = os.getenv("CONSUMER_SECRET")

    # bot parameters
    target_username = os.getenv("TARGET_USERNAME")
    single_author_max_tweets = os.getenv("SINGLE_AUTHOR_MAX_TWEETS", 3)
    brief_period = os.getenv("BRIEF_PERIOD", 1)
    brief_max_tweets = os.getenv("BRIEF_MAX_TWEETS", 30)
    url2qrcode = str2bool(os.getenv("URL2QR", "True"))

    # storage parameters
    brief_output = os.getenv("BRIEF_OUTPUT", None)
    dropbox_access_token = os.getenv("DROPBOX_ACCESS_TOKEN", None)

    # Coerce numeric parameters; invalid values fall back to defaults with a
    # warning instead of aborting. SINGLE_AUTHOR_MAX_TWEETS is also capped at 3.
    try:
        single_author_max_tweets = int(single_author_max_tweets)
        if single_author_max_tweets > 3:
            logger.warning("SINGLE_AUTHOR_MAX_TWEETS is greater than 3! Setting to default (3) ...")
            single_author_max_tweets = 3
    except ValueError:
        logger.warning("SINGLE_AUTHOR_MAX_TWEETS must be an integer! Setting to default (3) ...")
        single_author_max_tweets = 3

    try:
        brief_period = int(brief_period)
    except ValueError:
        logger.warning("BRIEF_PERIOD must be an integer! Setting to default (1) ...")
        brief_period = 1

    try:
        brief_max_tweets = int(brief_max_tweets)
    except ValueError:
        logger.warning("BRIEF_MAX_TWEETS must be an integer! Setting to default (30) ...")
        brief_max_tweets = 30

    # Validate local output directory: create it if missing, require write access.
    if brief_output is not None:
        try:
            brief_output = Path(brief_output)
            if not brief_output.is_dir():
                brief_output.mkdir(parents=True)
            if not os.access(brief_output, os.W_OK):
                raise PermissionError(f"No write permissions on `{brief_output}`!")
        except (FileExistsError, PermissionError):
            logger.error(f"The path `{brief_output}` is broken!")
            raise

    # Validate the Dropbox token early by fetching the current account.
    if dropbox_access_token is not None:
        try:
            dbx = Dropbox(dropbox_access_token)
            dbx.users_get_current_account()
        except (AuthError, BadInputError):
            logger.error("`DROPBOX_ACCESS_TOKEN` is invalid!")
            raise
    logger.info("Parameters loaded")

    logger.info("Extracting tweets...")
    extractor = TweetExtractor(consumer_key, consumer_secret, tweet_mode="extended")
    tweets_in_brief = extractor.extract_top_tweets(
        target_username, single_author_max_tweets, brief_max_tweets, brief_period
    )

    logger.info("Exporting brief...")
    exporter = PDFExporter(url2qrcode)
    date_str = datetime.now().strftime("%Y-%m-%d")
    period_desc = (
        "Daily"
        if brief_period == 1
        else "Weekly"
        if brief_period == 7
        else "Monthly"
        if 30 <= brief_period <= 31
        else f"Last {brief_period} days"
    )
    title = f"{period_desc} Twitter Brief for @{target_username} ({date_str})"
    # NOTE(review): the subtitle labels the time as UTC but datetime.now() is
    # local time — correct only if the host runs in UTC; confirm deployment TZ.
    subtitle = f"Excluding RTs, top {single_author_max_tweets} tweets/author, {datetime.now().strftime('%H:%M:%S UTC')}"
    pdf = exporter.export(tweets_in_brief, title=title, subtitle=subtitle)
    filename = f"tweetbrief-{target_username}-{period_desc.lower()}-{date_str}.pdf"

    if brief_output is not None:
        logger.info("Saving locally...")
        brief_path = brief_output / filename
        with open(brief_path, "wb") as f:
            f.write(pdf.getbuffer())
        logger.info("Brief saved")

    if dropbox_access_token is not None:
        logger.info("Uploading to Dropbox...")
        brief_path = Path("/") / filename
        try:
            dbx.files_upload(pdf.getvalue(), brief_path.as_posix())
        except ApiError:
            # Dropbox rejects a duplicate path in the default (add) mode.
            logger.error("Brief exists!")
            raise
        logger.info("Brief uploaded")