def __init__(self, parsed_url):
    try:
        import mediafire.client
    except ImportError as e:
        raise BackendException(u"""\
Mediafire backend requires the mediafire library.
Exception: %s""" % str(e))

    duplicity.backend.Backend.__init__(self, parsed_url)

    mediafire_email = parsed_url.username
    mediafire_password = self.get_password()

    self._file_res = mediafire.client.File
    self._folder_res = mediafire.client.Folder
    self._downloaderror_exc = mediafire.client.DownloadError
    self._notfound_exc = mediafire.client.ResourceNotFoundError

    self.client = mediafire.client.MediaFireClient()
    self.client.login(app_id=DUPLICITY_APP_ID,
                      email=mediafire_email,
                      password=mediafire_password)

    # //username:password@host/path/to/folder -> path/to/folder
    uri = u'mf:///' + parsed_url.path.split(u'/', 3)[3]

    # Create folder if it does not exist and make sure it is private.
    # See MediaFire Account Settings / Security and Privacy / Share Link
    # to set "Inherit from parent folder"
    try:
        folder = self.client.get_resource_by_uri(uri)
        if not isinstance(folder, self._folder_res):
            raise BackendException(u"target_url already exists "
                                   u"and is not a folder")
    except mediafire.client.ResourceNotFoundError:
        # force folder to be private
        folder = self.client.create_folder(uri, recursive=True)
        self.client.update_folder_metadata(uri, privacy=u'private')

    self.folder = folder
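# A minimal illustration (hypothetical values) of the path split above:
# judging by the comment, duplicity leaves the authority inside
# parsed_url.path for mediafire targets, so splitting at most three times
# peels off the two empty leading fields and the user:password@host part,
# keeping the folder path intact.
#
#   >>> u'//user:password@host/path/to/folder'.split(u'/', 3)
#   [u'', u'', u'user:password@host', u'path/to/folder']
#   >>> u'mf:///' + u'//user:password@host/path/to/folder'.split(u'/', 3)[3]
#   u'mf:///path/to/folder'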
def wrapper(self, *args):
    if login_required and not self.sess.is_linked():
        # (an unreachable `return` after this raise was removed)
        raise BackendException("dpbx Cannot login: check your credentials",
                               log.ErrorCode.dpbx_nologin)

    try:
        return f(self, *args)
    except TypeError as e:
        log_exception(e)
        raise BackendException('dpbx type error "%s"' % (e,))
    except rest.ErrorResponse as e:
        msg = e.user_error_msg or util.uexc(e)
        log.Error('dpbx error: %s' % (msg,),
                  log.ErrorCode.backend_command_error)
        raise e
    except Exception as e:
        log_exception(e)
        log.Error('dpbx code error "%s"' % (e,),
                  log.ErrorCode.backend_code_error)
        raise e
def __init__(self, parsed_url):
    try:
        from cloudfiles import Connection
        from cloudfiles.errors import ResponseError
        from cloudfiles import consts
    except ImportError as e:
        raise BackendException("""\
Cloudfiles backend requires the cloudfiles library available from Rackspace.
Exception: %s""" % str(e))

    self.resp_exc = ResponseError
    conn_kwargs = {}

    if 'CLOUDFILES_USERNAME' not in os.environ:
        raise BackendException('CLOUDFILES_USERNAME environment variable '
                               'not set.')

    if 'CLOUDFILES_APIKEY' not in os.environ:
        raise BackendException(
            'CLOUDFILES_APIKEY environment variable not set.')

    conn_kwargs['username'] = os.environ['CLOUDFILES_USERNAME']
    conn_kwargs['api_key'] = os.environ['CLOUDFILES_APIKEY']

    if 'CLOUDFILES_AUTHURL' in os.environ:
        conn_kwargs['authurl'] = os.environ['CLOUDFILES_AUTHURL']
    else:
        conn_kwargs['authurl'] = consts.default_authurl

    container = parsed_url.path.lstrip('/')

    try:
        conn = Connection(**conn_kwargs)
    except Exception as e:
        log.FatalError(
            "Connection failed, please check your credentials: %s %s"
            % (e.__class__.__name__, util.uexc(e)),
            log.ErrorCode.connection_failed)

    self.container = conn.create_container(container)
def runremote(self, cmd, ignoreexitcode=False, errorprefix=""):
    """Small convenience function that opens a shell channel, runs a remote
    command and returns the command's stdout. Throws an exception if the
    exit code != 0 and that is not ignored."""
    try:
        ch_in, ch_out, ch_err = self.client.exec_command(
            cmd, -1, globals.timeout)
        output = ch_out.read(-1)
        return output
    except Exception as e:
        if not ignoreexitcode:
            raise BackendException("%sfailed: %s \n %s" %
                                   (errorprefix, ch_err.read(-1), e))
def subprocess_popen(self, commandline):
    """
    Execute the given command line with error check.
    Returns int Exitcode, string StdOut, string StdErr

    Raise a BackendException on failure.
    """
    private = self.munge_password(commandline)
    log.Info(_("Reading results of '%s'") % private)
    result, stdout, stderr = self._subprocess_popen(commandline)
    if result != 0:
        raise BackendException("Error running '%s'" % private)
    return result, stdout, stderr
def _put(self, source_path, remote_filename):
    remote_dir = urllib.unquote(self.parsed_url.path.lstrip('/'))
    remote_path = '/' + os.path.join(remote_dir, remote_filename).rstrip()

    file_size = os.path.getsize(source_path.name)
    progress.report_transfer(0, file_size)

    if file_size < DPBX_UPLOAD_CHUNK_SIZE:
        # Upload whole file at once to avoid extra server request
        res_metadata = self.put_file_small(source_path, remote_path)
    else:
        res_metadata = self.put_file_chunked(source_path, remote_path)

    # A few sanity checks
    if res_metadata.path_display != remote_path:
        raise BackendException('dpbx: result path mismatch: %s (expected: %s)' %
                               (res_metadata.path_display, remote_path))
    if res_metadata.size != file_size:
        raise BackendException('dpbx: result size mismatch: %s (expected: %s)' %
                               (res_metadata.size, file_size))
def gethostconfig(self, file, host):
    file = os.path.expanduser(file)
    if not os.path.isfile(file):
        return {}

    sshconfig = paramiko.SSHConfig()
    try:
        sshconfig.parse(open(file))
    except Exception as e:
        raise BackendException("could not load '%s', maybe corrupt?" % (file))

    return sshconfig.lookup(host)
def _get(self, remote_filename, local_path):
    files = self.client.get_files()
    entries = self.__filter_entries(files, self.folder, remote_filename, 'file')

    if len(entries):
        # get first matching remote file
        # (wrapped in list() so this also works on Python 3 dict views)
        entry = list(entries.keys())[0]
        self.client.download((entry, entries[entry]),
                             dest_filename=local_path.name)
    else:
        raise BackendException("Failed to find file '%s' in remote folder '%s'" %
                               (remote_filename, self.__get_node_name(self.folder)),
                               code=log.ErrorCode.backend_not_found)
def _put(self, source_path, remote_filename):
    ssrcp = source_path.uc_name
    sremf = remote_filename.decode("utf-8", errors="ignore")
    sremp = self.remote_path
    temp_dir = os.path.dirname(ssrcp)

    # temporarily give the local file its remote name so rclone uploads it as such
    os.rename(ssrcp, os.path.join(temp_dir, sremf))
    commandline = u"%s copy --include %s %s %s" % (
        self.rclone_cmd, sremf, temp_dir, sremp)
    rc, o, e = self._subprocess(commandline)
    if rc != 0:
        os.rename(os.path.join(temp_dir, sremf), ssrcp)
        raise BackendException(e.split(b'\n')[0])
    os.rename(os.path.join(temp_dir, sremf), ssrcp)
def __init__(self, parsed_url):
    duplicity.backend.Backend.__init__(self, parsed_url)

    # Sanity check: ensure all the necessary "MEGAcmd" binaries exist
    self._check_binary_exists(u'mega-login')
    self._check_binary_exists(u'mega-logout')
    self._check_binary_exists(u'mega-cmd')
    self._check_binary_exists(u'mega-cmd-server')
    self._check_binary_exists(u'mega-ls')
    self._check_binary_exists(u'mega-mkdir')
    self._check_binary_exists(u'mega-get')
    self._check_binary_exists(u'mega-put')
    self._check_binary_exists(u'mega-rm')

    # "MEGAcmd" does not use a config file, however it is handy to keep one
    # (with the old ".megarc" format) to securely store the username and password
    self._hostname = parsed_url.hostname
    if parsed_url.password is None:
        self._megarc = os.getenv(u'HOME') + u'/.megav2rc'
        try:
            conf_file = open(self._megarc, u"r")
        except Exception as e:
            raise BackendException(
                u"No password provided in URL and MEGA configuration "
                u"file for duplicity does not exist as '%s'" % (self._megarc,))
        myvars = {}
        for line in conf_file:
            name, var = line.partition(u"=")[::2]
            myvars[name.strip()] = str(var.strip())
        conf_file.close()
        self._username = myvars[u"Username"]
        self._password = myvars[u"Password"]
    else:
        self._username = parsed_url.username
        self._password = self.get_password()

    # Remote folder ("MEGAcmd" no longer shows "Root/" at the top of the hierarchy)
    self._folder = u'/' + parsed_url.path[1:]

    # Only create the remote folder if it doesn't exist yet
    self.mega_login()
    cmd = [u'mega-ls', self._folder]
    try:
        self.subprocess_popen(cmd)
    except Exception as e:
        self._makedir(self._folder)
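# Illustration of the config parse above (hypothetical file contents):
# str.partition() returns (head, separator, tail), and the [::2] slice keeps
# just the head and tail, i.e. the key and the value around the first "=".
#
#   >>> u"Username = alice@example.com".partition(u"=")[::2]
#   (u'Username ', u' alice@example.com')
#   # .strip() on both pieces then yields u'Username' / u'alice@example.com'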
def __init__(self, parsed_url):
    duplicity.backend.Backend.__init__(self, parsed_url)

    # Import Microsoft Azure Storage SDK for Python library.
    try:
        import azure
        import azure.storage
        import azure.storage.blob
        from azure.storage.blob import BlobServiceClient
    except ImportError as e:
        raise BackendException(u"""\
Azure backend requires Microsoft Azure Storage SDK for Python
(https://pypi.python.org/pypi/azure-storage/).
Exception: %s""" % str(e))

    # TODO: validate container name
    self.container_name = parsed_url.path.lstrip(u'/')

    if u'AZURE_CONNECTION_STRING' not in os.environ:
        raise BackendException(
            u'AZURE_CONNECTION_STRING environment variable not set.')

    kwargs = {}

    if config.timeout:
        kwargs[u'timeout'] = config.timeout

    if config.azure_max_single_put_size:
        kwargs[u'max_single_put_size'] = config.azure_max_single_put_size

    if config.azure_max_block_size:
        # use the block-size setting here, not the single-put size
        kwargs[u'max_block_size'] = config.azure_max_block_size

    conn_str = os.environ[u'AZURE_CONNECTION_STRING']
    self.blob_service = BlobServiceClient.from_connection_string(
        conn_str, None, **kwargs)
    self._get_or_create_container()
def _get(self, remote_filename, local_path):
    with local_path.open('wb') as f:
        file_id = self.get_file_id(remote_filename)
        if file_id is None:
            raise BackendException(
                'File "%s" cannot be downloaded: it does not exist' %
                (remote_filename,))

        response = self.http_client.get(
            self.API_URI + file_id + '/content', stream=True)
        response.raise_for_status()
        for chunk in response.iter_content(chunk_size=4096):
            if chunk:
                f.write(chunk)
        f.flush()
def _list(self):
    filelist = []
    commandline = "%s ls %s" % (self.rclone_cmd, self.remote_path)
    rc, o, e = self._subprocess(commandline)
    if rc != 0:
        if e.endswith("not found\n"):
            return filelist
        else:
            raise BackendException(e.split('\n')[0])
    if not o:
        return filelist
    lines = o.split('\n')
    for x in lines:
        if x:
            filelist.append(x.split()[-1])
    return filelist
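# Illustration of the "rclone ls" parse above (hypothetical output): each
# line is "<size> <name>", so the last whitespace-separated field is the
# file name. Names containing spaces would be truncated, which duplicity's
# own volume names never do.
#
#   >>> o = " 102400 duplicity-full.20200101T000000Z.vol1.difftar.gpg\n"
#   >>> [x.split()[-1] for x in o.split('\n') if x]
#   ['duplicity-full.20200101T000000Z.vol1.difftar.gpg']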
def _get(self, remote_filename, local_path):
    """Download file from Amazon Drive"""
    with local_path.open('wb') as local_file:
        file_id = self.get_file_id(remote_filename)
        if file_id is None:
            raise BackendException(
                'File "%s" cannot be downloaded: it does not exist' %
                remote_filename)

        response = self.http_client.get(
            self.content_url + '/nodes/' + file_id + '/content', stream=True)
        response.raise_for_status()
        for chunk in response.iter_content(chunk_size=DEFAULT_BUFFER_SIZE):
            if chunk:
                local_file.write(chunk)
        local_file.flush()
def subprocess_popen_persist(self, commandline):
    """
    Execute the given command line with error check.
    Retries globals.num_retries times with 30s delay.
    Returns int Exitcode, string StdOut, string StdErr

    Raise a BackendException on failure.
    """
    private = self.munge_password(commandline)

    for n in range(1, globals.num_retries + 1):
        # sleep before retry
        if n > 1:
            time.sleep(30)
        log.Info(_("Reading results of '%s'") % private)
        result, stdout, stderr = self._subprocess_popen(commandline)
        if result == 0:
            return result, stdout, stderr

        try:
            m = re.search(r"^\s*([\S]+)", commandline)
            cmd = m.group(1)
            ignores = self.popen_persist_breaks[cmd]
            ignores.index(result)
            # ignore a predefined set of error codes
            return 0, '', ''
        except (KeyError, ValueError):
            pass

        log.Warn(gettext.ngettext("Running '%s' failed with code %d (attempt #%d)",
                                  "Running '%s' failed with code %d (attempt #%d)",
                                  n) % (private, result, n))
        if stdout or stderr:
            log.Warn(_("Error is:\n%s") % stderr +
                     (stderr and stdout and "\n") + stdout)

    log.Warn(gettext.ngettext("Giving up trying to execute '%s' after %d attempt",
                              "Giving up trying to execute '%s' after %d attempts",
                              globals.num_retries) % (private, globals.num_retries))
    raise BackendException("Error running '%s'" % private)
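# Sketch of how popen_persist_breaks is consulted above (hypothetical table):
# the first whitespace-delimited token of the command line selects a list of
# exit codes to treat as success; list.index() raises ValueError when the
# code is absent, which falls through to the retry/warn path.
#
#   popen_persist_breaks = {'ncftpls': [5]}   # hypothetical entry
#   # an "ncftpls ..." command exiting with code 5 would then return (0, '', '')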
def _delete(self, filename):
    url = self.directory + util.fsdecode(filename)
    response = None
    try:
        response = self.request(u"DELETE", url)
        if response.status in [200, 204]:
            response.read()
            response.close()
        else:
            status = response.status
            reason = response.reason
            response.close()
            raise BackendException(_(u"WebDAV DEL Bad status code %s reason %s.") %
                                   (status, reason))
    except Exception as e:
        raise e
    finally:
        if response:
            response.close()
def __init__(self, parsed_url):
    """
    Authorize to B2 api and set up needed variables
    """
    duplicity.backend.Backend.__init__(self, parsed_url)

    # for prettier password prompt only
    self.parsed_url.hostname = 'B2'

    self.account_id = parsed_url.username
    account_key = self.get_password()

    self.url_parts = [
        x for x in parsed_url.path.replace("@", "/").split('/') if x != ''
    ]
    if self.url_parts:
        self.username = self.url_parts.pop(0)
        self.bucket_name = self.url_parts.pop(0)
    else:
        raise BackendException("B2 requires a bucket name")
    self.path = "/".join(self.url_parts)

    id_and_key = self.account_id + ":" + account_key
    basic_auth_string = 'Basic ' + base64.b64encode(id_and_key)
    headers = {'Authorization': basic_auth_string}

    request = urllib2.Request(
        'https://api.backblaze.com/b2api/v1/b2_authorize_account',
        headers=headers)
    response = urllib2.urlopen(request)
    response_data = json.loads(response.read())
    response.close()

    self.auth_token = response_data['authorizationToken']
    self.api_url = response_data['apiUrl']
    self.download_url = response_data['downloadUrl']

    try:
        self.find_or_create_bucket(self.bucket_name)
    except urllib2.HTTPError:
        raise FatalBackendException("Bucket cannot be created")
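# Mechanical illustration of the B2 path split above (hypothetical path): the
# "@" in the path is first turned into another "/" separator, then empty
# fields are dropped; the first two parts are popped as username and bucket.
#
#   >>> [x for x in '/keyname@bucket/some/prefix'.replace("@", "/").split('/') if x != '']
#   ['keyname', 'bucket', 'some', 'prefix']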
def __init__(self, parsed_url):
    duplicity.backend.Backend.__init__(self, parsed_url)

    try:
        import boto
        from boto.s3.connection import Location
    except ImportError:
        raise

    assert boto.Version >= BOTO_MIN_VERSION

    # This folds the null prefix and all null parts, which means that:
    #  //MyBucket/ and //MyBucket are equivalent.
    #  //MyBucket//My///My/Prefix/ and //MyBucket/My/Prefix are equivalent.
    self.url_parts = [x for x in parsed_url.path.split('/') if x != '']

    if self.url_parts:
        self.bucket_name = self.url_parts.pop(0)
    else:
        # Duplicity hangs if boto gets a null bucket name.
        # HC: Caught a socket error, trying to recover
        raise BackendException('Boto requires a bucket name.')

    self.scheme = parsed_url.scheme

    if self.url_parts:
        self.key_prefix = '%s/' % '/'.join(self.url_parts)
    else:
        self.key_prefix = ''

    self.straight_url = duplicity.backend.strip_auth_from_url(parsed_url)
    self.parsed_url = parsed_url

    # duplicity and boto.storage_uri() have different URI formats.
    # boto uses scheme://bucket[/name] and specifies hostname on connect()
    self.boto_uri_str = '://'.join(
        (parsed_url.scheme[:2], parsed_url.path.lstrip('/')))

    if globals.s3_european_buckets:
        self.my_location = Location.EU
    else:
        self.my_location = ''

    self.resetConnection()
    self._listed_keys = {}
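# Illustration of the null-part folding above (hypothetical path): empty
# strings produced by doubled slashes are filtered out, so bucket name and
# key prefix come out the same however many slashes the user typed.
#
#   >>> [x for x in '//MyBucket//My///My/Prefix/'.split('/') if x != '']
#   ['MyBucket', 'My', 'My', 'Prefix']
#   # -> bucket_name 'MyBucket', key_prefix 'My/My/Prefix/'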
def _get(self, remote_filename, local_path):
    # since the backend operations will be retried, we can't
    # simply try to get from the store, if not found, move to the
    # next store (since each failure will be retried n times
    # before finally giving up). So we need to get the list first
    # before we try to fetch
    # ENHANCEME: maintain a cached list for each store
    for s in self.__stores:
        flist = s.list()  # renamed from 'list' to avoid shadowing the builtin
        if remote_filename in flist:
            s.get(remote_filename, local_path)
            return
        log.Log(_("MultiBackend: failed to get %s to %s from %s") %
                (remote_filename, local_path, s.backend.parsed_url.url_string),
                log.INFO)
    log.Log(_("MultiBackend: failed to get %s. Tried all backing stores "
              "and none succeeded") % (remote_filename,),
            log.ERROR)
    raise BackendException("failed to get")
def request(self, commandline):
    # request for commands returning data in XML format
    log.Debug(u"Request command: {0}".format(commandline))
    try:
        _, reply, error = self.subprocess_popen(commandline)
    except KeyError:
        raise BackendException(
            u"Unknown protocol failure on request {0}".format(commandline))

    response = reply + error
    try:
        xml = u"<root>" + u''.join(re.findall(u"<[^>]+>", response)) + u"</root>"
        el = ET.fromstring(xml)
    except Exception:
        el = None
    log.Debug(u"Request response: {0}".format(response))
    return el
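# Illustration of the XML scrape above (hypothetical response text): only
# tag-shaped tokens are kept, so stray non-XML output from the command is
# discarded before parsing; note that text between tags is discarded too.
#
#   >>> re.findall(u"<[^>]+>", u"junk <a><b/></a> trailing")
#   [u'<a>', u'<b/>', u'</a>']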
def _list(self):
    accum = []
    next_url = self.API_URI + self.directory_onedrive_path + u':/children'
    while True:
        response = self.http_client.get(next_url)
        if response.status_code == 404:
            # No further files here
            break
        response.raise_for_status()
        responseJson = response.json()
        if u'value' not in responseJson:
            raise BackendException(u'Malformed JSON: expected "value" member in %s' %
                                   (responseJson,))
        accum += responseJson[u'value']
        if u'@odata.nextLink' in responseJson:
            next_url = responseJson[u'@odata.nextLink']
        else:
            break
    return [x[u'name'] for x in accum]
def _put(self, source_path, remote_filename):
    url = self.directory + util.fsdecode(remote_filename)
    response = None
    try:
        source_file = source_path.open(u"rb")
        response = self.request(u"PUT", url, source_file.read())
        # 200 is returned if a file is overwritten during restarting
        if response.status in [200, 201, 204]:
            response.read()
            response.close()
        else:
            status = response.status
            reason = response.reason
            response.close()
            raise BackendException(_(u"WebDAV PUT Bad status code %s reason %s.") %
                                   (status, reason))
    except Exception as e:
        raise e
    finally:
        if response:
            response.close()
def _authorize(self, email, password, captcha_token=None, captcha_response=None):
    try:
        self.client.client_login(email,
                                 password,
                                 source='duplicity $version',
                                 service='writely',
                                 captcha_token=captcha_token,
                                 captcha_response=captcha_response)
    except gdata.client.CaptchaChallenge as challenge:
        print('A captcha challenge is required. Please visit ' +
              challenge.captcha_url)
        answer = None
        while not answer:
            answer = raw_input('Answer to the challenge? ')
        self._authorize(email, password, challenge.captcha_token, answer)
    except gdata.client.BadAuthentication:
        raise BackendException(
            'Invalid user credentials given. Be aware that accounts '
            'that use 2-step verification require creating an application specific '
            'access code for using this Duplicity backend. Follow the instructions in '
            'http://www.google.com/support/accounts/bin/static.py?page=guide.cs&guide=1056283&topic=1056286 '
            'and create your application-specific password to run duplicity backups.')
def __init__(self, parsed_url):
    duplicity.backend.Backend.__init__(self, parsed_url)

    # This folds the null prefix and all null parts, which means that:
    #  //MyBucket/ and //MyBucket are equivalent.
    #  //MyBucket//My///My/Prefix/ and //MyBucket/My/Prefix are equivalent.
    url_path_parts = [x for x in parsed_url.path.split(u'/') if x != u'']
    if url_path_parts:
        self.bucket_name = url_path_parts.pop(0)
    else:
        raise BackendException(u'S3 requires a bucket name.')

    if url_path_parts:
        self.key_prefix = u'%s/' % u'/'.join(url_path_parts)
    else:
        self.key_prefix = u''

    self.parsed_url = parsed_url
    self.straight_url = duplicity.backend.strip_auth_from_url(parsed_url)
    self.s3 = None
    self.bucket = None
    self.tracker = UploadProgressTracker()
def __init__(self, parsed_url):
    duplicity.backend.Backend.__init__(self, parsed_url)

    try:
        from dropbox import Dropbox
        from dropbox.exceptions import AuthError, BadInputError, ApiError
        from dropbox.files import (UploadSessionCursor, CommitInfo,
                                   WriteMode, GetMetadataError,
                                   DeleteError, UploadSessionLookupError,
                                   ListFolderError)
        from dropbox.oauth import DropboxOAuth2FlowNoRedirect
    except ImportError as e:
        raise BackendException("""\
This backend requires the dropbox package version 6.9.0
To install use "sudo pip install dropbox==6.9.0"
Exception: %s""" % str(e))

    self.api_account = None
    self.api_client = None
    self.auth_flow = None
    self.login()
def __init__(self, parsed_url):
    duplicity.backend.Backend.__init__(self, parsed_url)

    # Import JottaCloud libraries.
    try:
        from jottalib import JFS
    except ImportError:
        raise BackendException('JottaCloud backend requires jottalib'
                               ' (see https://pypi.python.org/pypi/jottalib).')

    # Set jottalib loggers to the same verbosity as duplicity
    duplicity_log_level = get_duplicity_log_level()
    set_jottalib_logging_level(duplicity_log_level)

    # Ensure jottalib and duplicity log to the same handlers
    set_jottalib_log_handlers(log._logger.handlers)

    # Will fetch jottacloud auth from environment or .netrc
    self.client = JFS.JFS()

    self.folder = self.get_or_create_directory(parsed_url.path.lstrip('/'))
    log.Debug("Jottacloud folder for duplicity: %r" % self.folder.path)
def subprocess_popen(self, commandline):
    """
    Execute the given command line with error check.
    Returns int Exitcode, string StdOut, string StdErr

    Raise a BackendException on failure.
    """
    private = self.munge_password(commandline)
    log.Info(_("Reading results of '%s'") % private)
    result, stdout, stderr = self.__subprocess_popen(commandline)
    if result != 0:
        try:
            m = re.search(r"^\s*([\S]+)", commandline)
            cmd = m.group(1)
            ignores = self.popen_breaks[cmd]
            ignores.index(result)
            # ignore a predefined set of error codes
            return 0, '', ''
        except (KeyError, ValueError):
            raise BackendException("Error running '%s': returned %d, with output:\n%s" %
                                   (private, result, stdout + '\n' + stderr))
    return result, stdout, stderr
def login(self):
    if self.load_access_token() is None:
        self.obtain_access_token()

    self.api_client = Dropbox(self.load_access_token())
    self.api_account = None
    try:
        log.Debug(u'dpbx,users_get_current_account([token])')
        self.api_account = self.api_client.users_get_current_account()
        log.Debug(u"dpbx,%s" % self.api_account)
    except (BadInputError, AuthError) as e:
        log.Debug(u'dpbx,exception: %s' % e)
        log.Info(u"dpbx: Authentication failed. Trying to obtain new access token")

        self.obtain_access_token()

        # We're assuming obtain_access_token will throw exception.
        # So this line should not be reached
        raise BackendException(u"dpbx: Please update DPBX_ACCESS_TOKEN and try again")

    log.Info(u"dpbx: Successfully authenticated as %s" %
             self.api_account.name.display_name)
def __init__(self):
    # OAUTHLIB_RELAX_TOKEN_SCOPE prevents the oauthlib from complaining
    # about a mismatch between the requested scope and the delivered scope.
    # We need this because we don't get a refresh token without asking for
    # offline_access, but Microsoft Graph doesn't include offline_access
    # in its response (even though it does send a refresh_token).
    os.environ[u'OAUTHLIB_RELAX_TOKEN_SCOPE'] = u'TRUE'

    # Import requests-oauthlib
    try:
        # On debian (and derivatives), get these dependencies using:
        #   apt-get install python-requests-oauthlib
        # On fedora (and derivatives), get these dependencies using:
        #   yum install python-requests-oauthlib
        from requests_oauthlib import OAuth2Session
        self.session_class = OAuth2Session
    except ImportError as e:
        raise BackendException(
            u'OneDrive backend requires python-requests-oauthlib to be '
            u'installed. Please install it and try again.\n' + str(e))

    # Should be filled by a subclass
    self.session = None
def makedir(self):
    u"""Make (nested) directories on the server."""
    dirs = self.directory.split(u"/")
    # url causes directory to start with /, but it might be given
    # with or without trailing / (which is required)
    if dirs[-1] == u'':
        dirs = dirs[0:-1]
    for i in range(1, len(dirs)):
        d = u"/".join(dirs[0:i + 1]) + u"/"

        self.headers[u'Depth'] = u"1"
        response = self.request(u"PROPFIND", d)
        del self.headers[u'Depth']

        log.Info(u"Checking existence dir %s: %d" % (d, response.status))

        if response.status == 404:
            log.Info(_(u"Creating missing directory %s") % d)
            res = self.request(u"MKCOL", d)
            if res.status != 201:
                raise BackendException(_(u"WebDAV MKCOL %s failed: %s %s") %
                                       (d, res.status, res.reason))
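# Illustration of the directory walk above (hypothetical directory): for
# self.directory == u"/backups/host/", the trailing empty element is dropped
# and each intermediate collection is probed (PROPFIND) and, if missing,
# created (MKCOL) in turn.
#
#   >>> dirs = u"/backups/host/".split(u"/")[:-1]
#   >>> [u"/".join(dirs[0:i + 1]) + u"/" for i in range(1, len(dirs))]
#   [u'/backups/', u'/backups/host/']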