def _get(self, remote_filename, local_path):
    """Download remote_filename into local_path via an lftp batch script.

    Builds an lftp command line, runs it, and logs its stdout/stderr.
    """
    # Quote the remote path and the filename TOGETHER: previously only
    # self.remote_path was cmd_quote()d and remote_filename was appended
    # outside the quotes, so a filename with spaces or shell
    # metacharacters broke (or could inject into) the command.
    commandline = "lftp -c \"source %s; get %s -o %s\"" % (
        cmd_quote(self.tempname),
        cmd_quote(self.remote_path + remote_filename),
        cmd_quote(local_path.name))
    log.Debug("CMD: %s" % commandline)
    _, l, e = self.subprocess_popen(commandline)
    log.Debug("STDERR:\n"
              "%s" % (e))
    log.Debug("STDOUT:\n"
              "%s" % (l))
def _get(self, remote_filename, local_path):
    """Download remote_filename into local_path via an lftp batch script."""
    # remote_path = os.path.join(urllib.unquote(self.parsed_url.path), remote_filename).rstrip()
    # NOTE: the old  \'  escapes were no-ops inside a double-quoted Python
    # string, so the shell saw the paths completely unquoted.  Use an outer
    # double quote with real inner single quotes so paths containing spaces
    # survive the shell.
    commandline = "lftp -c \"source '%s'; get '%s' -o '%s'\"" % \
        (self.tempname, self.remote_path + remote_filename, local_path.name)
    log.Debug("CMD: %s" % commandline)
    _, l, e = self.subprocess_popen(commandline)
    log.Debug("STDERR:\n"
              "%s" % (e))
    log.Debug("STDOUT:\n"
              "%s" % (l))
def user_authenticated(self):
    """Return True if the Dropbox client currently has a valid account.

    Probes the API with users_get_current_account(); any failure is
    treated as "not authenticated".
    """
    try:
        account = self.api_client.users_get_current_account()
        # Fixed stray comma in the log message ("as ,%s").
        log.Debug("User authenticated as %s" % account)
        return True
    except Exception:
        # Narrowed from a bare except: so KeyboardInterrupt/SystemExit
        # are no longer swallowed while probing authentication.
        log.Debug('User not authenticated')
        return False
def _delete(self, filename):
    """Remove *filename* from the remote backup directory via lftp."""
    cmd = u"lftp -c \"source %s; cd %s; rm %s\"" % (
        cmd_quote(self.tempname),
        cmd_quote(self.remote_path),
        cmd_quote(util.fsdecode(filename)),
    )
    log.Debug(u"CMD: %s" % cmd)
    _, stdout, stderr = self.subprocess_popen(cmd)
    log.Debug(u"STDERR:\n"
              u"%s" % (stderr))
    log.Debug(u"STDOUT:\n"
              u"%s" % (stdout))
def _delete(self, filename):
    """Delete *filename* from the remote backup directory via lftp."""
    # remote_dir = urllib.unquote(self.parsed_url.path.lstrip('/')).rstrip()
    # NOTE: the old  \'  escapes were no-ops inside a double-quoted Python
    # string, leaving every path unquoted in the shell command; quote them
    # for real so paths with spaces work.
    commandline = "lftp -c \"source '%s'; cd '%s'; rm '%s'\"" % (
        self.tempname, self.remote_path, filename)
    log.Debug("CMD: %s" % commandline)
    _, l, e = self.subprocess_popen(commandline)
    log.Debug("STDERR:\n"
              "%s" % (e))
    log.Debug("STDOUT:\n"
              "%s" % (l))
def _put(self, source_path, remote_filename):
    """Upload source_path to remote_filename via an lftp batch script."""
    # remote_path = os.path.join(urllib.unquote(self.parsed_url.path.lstrip('/')), remote_filename).rstrip()
    # NOTE: the old  \'  escapes were no-ops inside a double-quoted Python
    # string, and the mkdir target had no quoting at all.  Quote every
    # path for real so paths with spaces work.
    commandline = "lftp -c \"source '%s'; mkdir -p '%s'; put '%s' -o '%s'\"" % \
        (self.tempname, self.remote_path, source_path.name,
         self.remote_path + remote_filename)
    log.Debug("CMD: %s" % commandline)
    s, l, e = self.subprocess_popen(commandline)
    log.Debug("STATUS: %s" % s)
    log.Debug("STDERR:\n"
              "%s" % (e))
    log.Debug("STDOUT:\n"
              "%s" % (l))
def _put(self, source_path, remote_filename):
    """Upload source_path to OneDrive as remote_filename.

    Flow: (1) check the account quota and fail early if the file cannot
    fit; (2) create an upload session; (3) PUT the file in fragments of
    REQUIRED_FRAGMENT_SIZE_MULTIPLE-aligned chunks until EOF.
    Raises BackendException on quota exhaustion or a failed session
    request; HTTP errors surface via raise_for_status().
    """
    # Happily, the OneDrive API will lazily create the folder hierarchy required to contain a pathname
    # Check if the user has enough space available on OneDrive before even
    # attempting to upload the file.
    remote_filename = remote_filename.decode(u"UTF-8")
    source_size = os.path.getsize(source_path.name)
    start = time.time()
    response = self.http_client.get(self.API_URI + u'me/drive?$select=quota')
    response.raise_for_status()
    if (u'quota' in response.json()):
        available = response.json()[u'quota'].get(u'remaining', None)
        if available:
            log.Debug(u'Bytes available: %d' % available)
            if source_size > available:
                raise BackendException((
                    u'Out of space: trying to store "%s" (%d bytes), but only '
                    u'%d bytes available on OneDrive.' % (
                        source_path.name, source_size, available)))
    log.Debug(u"Checked quota in %fs" % (time.time() - start))
    with source_path.open() as source_file:
        start = time.time()
        url = self.API_URI + self.directory_onedrive_path + remote_filename + u':/createUploadSession'
        response = self.http_client.post(url)
        response.raise_for_status()
        response_json = json.loads(response.content.decode(u"UTF-8"))
        if u'uploadUrl' not in response_json:
            raise BackendException((
                u'File "%s" cannot be uploaded: could not create upload session: %s' % (
                    remote_filename, response.content)))
        uploadUrl = response_json[u'uploadUrl']

        # https://docs.microsoft.com/en-us/onedrive/developer/rest-api/api/driveitem_createuploadsession?
        # indicates 10 MiB is optimal for stable high speed connections.
        offset = 0
        desired_num_fragments = old_div(10 * 1024 * 1024, self.REQUIRED_FRAGMENT_SIZE_MULTIPLE)
        while True:
            # Read a multiple of the required fragment size; an empty read
            # means EOF and ends the upload loop.
            chunk = source_file.read(desired_num_fragments * self.REQUIRED_FRAGMENT_SIZE_MULTIPLE)
            if len(chunk) == 0:
                break
            # Content-Range tells the upload session which byte span of
            # the total file this fragment covers (end index inclusive).
            headers = {
                u'Content-Length': u'%d' % (len(chunk)),
                u'Content-Range': u'bytes %d-%d/%d' % (offset, offset + len(chunk) - 1, source_size),
            }
            log.Debug(u'PUT %s %s' % (remote_filename, headers[u'Content-Range']))
            response = self.http_client.put(
                uploadUrl,
                headers=headers,
                data=chunk)
            response.raise_for_status()
            offset += len(chunk)
        log.Debug(u"PUT file in %fs" % (time.time() - start))
def delete(self, filenames, raise_errors=False):
    """Delete every name in *filenames* from the JottaCloud folder."""
    log.Debug('jottacloud.delete: %s' % filenames)
    for name in filenames:
        remote_name = os.path.join(self.folder.path, name)
        #first, get file object
        remote_obj = self.client.getObject(remote_name)
        log.Debug('jottacloud.delete deleting: %s (%s)' % (remote_obj, type(remote_obj)))
        # now, delete it
        resp = remote_obj.delete()
        log.Debug('jottacloud.delete(%s): %s' % (remote_name, resp))
def delete(self, filename_list):
    """Delete files in filename_list"""
    if not filename_list:
        log.Debug('dpbx.delete(): no op')
        return
    remote_dir = urllib.unquote(self.parsed_url.path.lstrip('/')).rstrip()
    for name in filename_list:
        target = os.path.join(remote_dir, name)
        result = self.api_client.file_delete(target)
        log.Debug('dpbx.delete(%s): %s' % (target, result))
def _put(self, source_path, remote_filename):
    """Upload source_path to remote_filename via an lftp batch script."""
    # Quote EVERY shell argument: previously self.tempname was interpolated
    # unquoted and remote_filename was appended outside of cmd_quote, so
    # paths with spaces or metacharacters broke (or could inject into) the
    # command line.
    commandline = "lftp -c \"source %s; mkdir -p %s; put %s -o %s\"" % (
        cmd_quote(self.tempname),
        cmd_quote(self.remote_path),
        cmd_quote(source_path.name),
        cmd_quote(self.remote_path + remote_filename))
    log.Debug("CMD: %s" % commandline)
    s, l, e = self.subprocess_popen(commandline)
    log.Debug("STATUS: %s" % s)
    log.Debug("STDERR:\n"
              "%s" % (e))
    log.Debug("STDOUT:\n"
              "%s" % (l))
def list(self, raise_errors=False):
    """Return the encoded names of all non-deleted files in the folder.

    Names are encoded with the locale's preferred encoding, falling back
    to LATIN1 when the locale does not define one.
    """
    log.Debug('jottacloud.list raise e %s' % (raise_errors))
    log.Debug('jottacloud.list: %s' % (self.folder.files()))
    encoding = locale.getdefaultlocale()[1]
    if encoding is None:
        encoding = 'LATIN1'
    # Dropped the redundant list(...) wrapper around the comprehension.
    return [
        f.name.encode(encoding)
        for f in self.folder.files()
        if not f.is_deleted()
    ]
def _query(self, filename):
    """Return {'size': n} for *filename* from Dropbox metadata."""
    # Re-authenticate lazily if the session has expired.
    if not self.user_authenticated():
        self.login()
    remote_dir = urllib.unquote(self.parsed_url.path.lstrip('/'))
    remote_path = '/' + os.path.join(remote_dir, filename).rstrip()
    log.Debug('dpbx.files_get_metadata(%s)' % remote_path)
    metadata = self.api_client.files_get_metadata(remote_path)
    log.Debug('dpbx.files_get_metadata(%s): %s' % (remote_path, metadata))
    return {'size': metadata.size}
def _get(self, remote_filename, local_path):
    """Download remote_filename into local_path via an lftp batch script."""
    if isinstance(remote_filename, b"".__class__):
        remote_filename = util.fsdecode(remote_filename)
    # Quote the remote path and the filename TOGETHER: previously only
    # self.remote_path was cmd_quote()d and remote_filename was appended
    # outside the quotes, so filenames with spaces or shell
    # metacharacters broke the command.
    commandline = u"lftp -c \"source %s; get %s -o %s\"" % (
        cmd_quote(self.tempname),
        cmd_quote(self.remote_path + remote_filename),
        cmd_quote(local_path.uc_name))
    log.Debug(u"CMD: %s" % commandline)
    _, l, e = self.subprocess_popen(commandline)
    log.Debug(u"STDERR:\n"
              u"%s" % (e))
    log.Debug(u"STDOUT:\n"
              u"%s" % (l))
def request(self, url, method="GET", body=None, headers={}, ignore=None):
    """Perform an OAuth-signed HTTP request with retries.

    Retries up to globals.num_retries times, sleeping 30s between
    attempts, and rewinds a file body before each retry.  Returns
    (resp, content) on a 2xx status or a status listed in *ignore*;
    JSON responses are decoded before returning.

    NOTE(review): the mutable default ``headers={}`` is shared across
    calls and is mutated by headers.update() below — confirm callers
    always pass their own dict, or this accumulates oauth headers.
    """
    url, oauth_header = self._get_oauth_request_header(url, method)
    headers.update(oauth_header)
    for n in range(1, globals.num_retries + 1):
        log.Info("making %s request to %s (attempt %d)" % (method, url, n))
        try:
            resp, content = self.client.request(url, method, headers=headers, body=body)
        except Exception, e:
            # Transport-level failure: log, give up on the final attempt,
            # otherwise rewind any file body and retry after a pause.
            log.Info("request failed, exception %s" % e)
            log.Debug("Backtrace of previous error: %s" % duplicity.util.exception_traceback())
            if n == globals.num_retries:
                log.FatalError("Giving up on request after %d attempts, last exception %s" % (n, e))
            if isinstance(body, file):
                body.seek(0)  # Go to the beginning of the file for the retry
            time.sleep(30)
            continue
        log.Info("completed request with status %s %s" % (resp.status, resp.reason))
        oops_id = resp.get('x-oops-id', None)
        if oops_id:
            log.Debug("Server Error: method %s url %s Oops-ID %s" % (method, url, oops_id))
        if resp['content-type'] == 'application/json':
            content = loads(content)
        # were we successful? status either 2xx or code we're told to ignore
        numcode = int(resp.status)
        if (numcode >= 200 and numcode < 300) or (ignore and numcode in ignore):
            return resp, content
        # Map well-known HTTP errors onto duplicity error codes.
        # NOTE(review): ecode is assigned but not used within this
        # visible chunk — presumably consumed after the loop; verify.
        ecode = log.ErrorCode.backend_error
        if numcode == 402:  # Payment Required
            ecode = log.ErrorCode.backend_no_space
        elif numcode == 404:
            ecode = log.ErrorCode.backend_not_found
        if isinstance(body, file):
            body.seek(0)  # Go to the beginning of the file for the retry
        if n < globals.num_retries:
            time.sleep(30)
def _put(self, source_path, remote_filename):
    """Upload source_path to remote_filename via an lftp batch script."""
    if isinstance(remote_filename, b"".__class__):
        remote_filename = util.fsdecode(remote_filename)
    # Quote EVERY shell argument: previously self.tempname was interpolated
    # unquoted and the filename was appended outside of cmd_quote, so paths
    # with spaces or shell metacharacters broke the command.
    commandline = u"lftp -c \"source %s; mkdir -p %s; put %s -o %s\"" % (
        cmd_quote(self.tempname),
        cmd_quote(self.remote_path),
        cmd_quote(source_path.uc_name),
        cmd_quote(self.remote_path + util.fsdecode(remote_filename)))
    log.Debug(u"CMD: %s" % commandline)
    s, l, e = self.subprocess_popen(commandline)
    log.Debug(u"STATUS: %s" % s)
    log.Debug(u"STDERR:\n"
              u"%s" % (e))
    log.Debug(u"STDOUT:\n"
              u"%s" % (l))
def set_values(self, sig_chain_warning=1):
    """
    Set values from archive_dir_path and backend.  Returns self
    for convenience.

    If sig_chain_warning is set to None, do not warn about
    unnecessary sig chains.  This is because there may naturally be
    some unnecessary ones after a full backup.
    """
    self.values_set = 1

    # get remote filename list
    backend_filename_list = self.backend.list()
    log.Debug(ngettext("%d file exists on backend",
                       "%d files exist on backend",
                       len(backend_filename_list)) %
              len(backend_filename_list))

    # get local filename list; collection-status and replicate work
    # purely from the backend, so the cache listing is skipped for them
    if self.action not in ["collection-status", "replicate"]:
        local_filename_list = self.archive_dir_path.listdir()
    else:
        local_filename_list = []
    log.Debug(ngettext("%d file exists in cache",
                       "%d files exist in cache",
                       len(local_filename_list)) %
              len(local_filename_list))

    # check for partial backups (locally cached, interrupted volumes)
    partials = []
    for local_filename in local_filename_list:
        pr = file_naming.parse(local_filename)
        if pr and pr.partial:
            partials.append(local_filename)

    # get various backup sets and chains
    (backup_chains,
     self.orphaned_backup_sets,
     self.incomplete_backup_sets) = \
        self.get_backup_chains(partials + backend_filename_list)
    backup_chains = self.get_sorted_chains(backup_chains)
    self.all_backup_chains = backup_chains

    # NOTE(review): backup_chains was just assigned to
    # self.all_backup_chains, so this assert compares a list with
    # itself and can never fire — confirm the intended invariant.
    assert len(backup_chains) == len(self.all_backup_chains), \
        "get_sorted_chains() did something more than re-ordering"

    # match local and remote signature chains against the backup chains
    local_sig_chains, self.local_orphaned_sig_names = \
        self.get_signature_chains(True)
    remote_sig_chains, self.remote_orphaned_sig_names = \
        self.get_signature_chains(False, filelist=backend_filename_list)
    self.set_matched_chain_pair(local_sig_chains + remote_sig_chains,
                                backup_chains)
    self.warn(sig_chain_warning)
    return self
def list(self):
    """List files in directory"""
    # Do a long listing to avoid connection reset
    log.Debug("Listing " + self.url_path)
    entries = []
    try:
        # Each directory record is a tuple; index 2 carries the name.
        for record in self.api.list_directory(self.url_path)[0]:
            log.Debug("Found: " + record[2])
            entries.append(record[2])
    except Exception:
        #The library will fail with EsuException: EsuException when remote folder doesn't exist
        pass
    return entries
def _list(self):
    """Return the names of the files in the remote directory."""
    # Do a long listing to avoid connection reset
    # remote_dir = urllib.unquote(self.parsed_url.path.lstrip('/')).rstrip()
    # NOTE: the old  \'  escapes were no-ops inside a double-quoted Python
    # string, so the paths reached the shell unquoted; quote them for real.
    # The unused remote_dir computation was dropped.
    commandline = "lftp -c \"source '%s'; cd '%s' || exit 0; ls\"" % (
        self.tempname, self.remote_path)
    log.Debug("CMD: %s" % commandline)
    _, l, e = self.subprocess_popen(commandline)
    log.Debug("STDERR:\n"
              "%s" % (e))
    log.Debug("STDOUT:\n"
              "%s" % (l))
    # Look for our files as the last element of a long list line
    return [x.split()[-1] for x in l.split('\n') if x]
def _setup_pool(self):
    """(Re)create the process pool used for multipart S3 uploads."""
    # Fall back to one worker per CPU when no explicit limit is set.
    procs = globals.s3_multipart_max_procs or multiprocessing.cpu_count()
    if getattr(self, '_pool', False):
        log.Debug("A process pool already exists. Destroying previous pool.")
        self._pool.terminate()
        self._pool.join()
        self._pool = None
    log.Debug("Setting multipart boto backend process pool to %d processes" % procs)
    self._pool = multiprocessing.Pool(processes=procs)
def add_to_sets(filename):
    """
    Try adding filename to existing sets, or make new one
    """
    for known_set in sets:
        if known_set.add_filename(filename):
            log.Debug(_("File %s is part of known set") % (util.ufn(filename),))
            return
    log.Debug(_("File %s is not part of a known set; creating new set") % (util.ufn(filename),))
    new_set = BackupSet(self.backend, self.action)
    if new_set.add_filename(filename):
        sets.append(new_set)
    else:
        log.Debug(_("Ignoring file (rejected by backup set) '%s'") % util.ufn(filename))
def release_lockfile():
    """Best-effort release of the global lockfile; never raises."""
    if not config.lockfile:
        return
    log.Debug(_(u"Releasing lockfile %s") % config.lockpath)
    try:
        config.lockfile.release()
    except Exception:
        pass
def release_lockfile():
    """Release the global lockfile if it is held; swallow unlock errors."""
    if not (globals.lockfile and globals.lockfile.is_locked()):
        return
    log.Debug(_("Releasing lockfile %s") % globals.lockfile.lock_file)
    try:
        globals.lockfile.release()
    except UnlockError:
        pass
def release_lockfile():
    """Best-effort release of the global lockfile; never raises."""
    if not globals.lockfile:
        return
    log.Debug(_("Releasing lockfile %s") % globals.lockpath)
    try:
        globals.lockfile.release()
    except Exception:
        pass
def _list(self):
    """List the backup collection via WebDAV PROPFIND.

    A 404 means the collection does not exist yet: it is created and an
    empty listing is returned.  200/207 responses are parsed for
    d:href/D:href elements; anything else raises BackendException.
    """
    response = None
    try:
        self.headers[u'Depth'] = u"1"
        response = self.request(u"PROPFIND", self.directory, self.listbody)
        del self.headers[u'Depth']
        # if the target collection does not exist, create it.
        if response.status == 404:
            response.close()  # otherwise next request fails with ResponseNotReady
            self.makedir()
            # just created an empty folder, so return empty
            return []
        elif response.status in [200, 207]:
            document = response.read()
            response.close()
        else:
            status = response.status
            reason = response.reason
            response.close()
            raise BackendException(u"Bad status code %s reason %s." % (status, reason))
        log.Debug(u"%s" % (document,))
        dom = xml.dom.minidom.parseString(document)
        result = []
        for href in dom.getElementsByTagName(u'd:href') + dom.getElementsByTagName(u'D:href'):
            filename = self.taste_href(href)
            if filename:
                result.append(filename)
        return result
    # Removed the pointless "except Exception as e: raise e" clause:
    # re-raising by value reset the traceback; exceptions now propagate
    # untouched while the finally block still closes the response.
    finally:
        if response:
            response.close()
def __init__(self, parsed_url):
    """Connect to JottaCloud and make sure the target folder exists."""
    duplicity.backend.Backend.__init__(self, parsed_url)
    # Import JottaCloud libraries.
    try:
        from jottalib import JFS
        from jottalib.JFS import JFSNotFoundError, JFSIncompleteFile
    except ImportError:
        raise BackendException(
            u'JottaCloud backend requires jottalib'
            u' (see https://pypi.python.org/pypi/jottalib).')
    # Route jottalib logging through duplicity's handlers at duplicity's
    # own verbosity so both libraries log consistently.
    set_jottalib_logging_level(get_duplicity_log_level())
    set_jottalib_log_handlers(log._logger.handlers)
    # Will fetch jottacloud auth from environment or .netrc
    self.client = JFS.JFS()
    self.folder = self.get_or_create_directory(parsed_url.path.lstrip(u'/'))
    log.Debug(u"Jottacloud folder for duplicity: %r" % self.folder.path)
def resolve_directory(self):
    """Ensures self.directory_id contains the folder id for the path.

    There is no API call to resolve a logical path (e.g.
    /backups/duplicity/notebook/), so we recursively list directories
    until we get the object id of the configured directory, creating
    directories as necessary.
    """
    object_id = 'me/skydrive'
    for component in [x for x in self.directory.split('/') if x]:
        tried_mkdir = False
        while True:
            listing = self.get_files(object_id)
            names_to_ids = {entry['name']: entry['id'] for entry in listing}
            if component in names_to_ids:
                break
            # Missing: create it once, re-list, and only then give up.
            if tried_mkdir:
                raise BackendException(
                    ('Could not resolve/create directory "%s" on '
                     'OneDrive: %s not in %s (files of folder %s)' %
                     (self.directory, component, names_to_ids.keys(),
                      object_id)))
            self.mkdir(object_id, component)
            tried_mkdir = True
        object_id = names_to_ids[component]
    self.directory_id = object_id
    log.Debug('OneDrive id for the configured directory "%s" is "%s"' %
              (self.directory, self.directory_id))
def resetConnection(self):
    """(Re)open the IMAP connection, log in, and select the mailbox."""
    parsed_url = self.url
    # IMAP_SERVER overrides the hostname from the URL.
    imap_server = os.environ.get(u'IMAP_SERVER', parsed_url.hostname)

    # Try to close the connection cleanly
    try:
        self.conn.close()
    except Exception:
        pass

    # Pick plain IMAP or IMAP-over-SSL based on the URL scheme.
    if (parsed_url.scheme == u"imap"):
        cl = imaplib.IMAP4
        self.conn = cl(imap_server, 143)
    elif (parsed_url.scheme == u"imaps"):
        cl = imaplib.IMAP4_SSL
        self.conn = cl(imap_server, 993)
    log.Debug(u"Type of imap class: %s" % (cl.__name__))
    self.remote_dir = re.sub(r'^/', r'', parsed_url.path, 1)

    # Login
    if globals.imap_full_address:
        self.conn.login(self.username + u"@" + parsed_url.hostname, self.password)
    else:
        self.conn.login(self.username, self.password)
    self.conn.select(globals.imap_mailbox)
    log.Info(u"IMAP connected")
def __init__(self, parsed_url):
    """Collect IMAP credentials and open the initial connection.

    Username comes from the URL or an interactive prompt; the password
    from the URL, the IMAP_PASSWORD environment variable, or getpass.
    """
    duplicity.backend.Backend.__init__(self, parsed_url)
    log.Debug(u"I'm %s (scheme %s) connecting to %s as %s" %
              (self.__class__.__name__, parsed_url.scheme,
               parsed_url.hostname, parsed_url.username))

    # Store url for reconnection on error
    self.url = parsed_url

    # Set the username
    if (parsed_url.username is None):
        # SECURITY FIX: this used to be eval(input(...)) — a bad Python 2
        # input() conversion that EXECUTED whatever the user typed.
        # Read the userid as a plain string instead.
        username = input(u'Enter account userid: ')
    else:
        username = parsed_url.username

    # Set the password
    if (not parsed_url.password):
        if u'IMAP_PASSWORD' in os.environ:
            password = os.environ.get(u'IMAP_PASSWORD')
        else:
            password = getpass.getpass(u"Enter account password: ")
    else:
        password = parsed_url.password

    self.username = username
    self.password = password
    self.resetConnection()
def _delete(self, filename):
    # - Delete one file
    # - Retried if an exception is thrown
    target = posixpath.join(self.folder.path, filename)
    remote_obj = self.client.getObject(target)
    log.Debug(u'jottacloud.delete deleting: %s (%s)' % (remote_obj, type(remote_obj)))
    remote_obj.delete()
def _list(self):
    """List the remote directory, creating it first when the cd fails."""
    # Do a long listing to avoid connection reset
    # remote_dir = urllib.unquote(self.parsed_url.path.lstrip('/')).rstrip()
    remote_dir = urllib.parse.unquote(self.parsed_url.path)
    # print remote_dir
    quoted_path = cmd_quote(self.remote_path)
    # failing to cd into the folder might be because it was not created already
    commandline = u"lftp -c \"source %s; ( cd %s && ls ) || ( mkdir -p %s && cd %s && ls )\"" % (
        cmd_quote(self.tempname),
        quoted_path,
        quoted_path,
        quoted_path,
    )
    log.Debug(u"CMD: %s" % commandline)
    _, stdout, stderr = self.subprocess_popen(commandline)
    log.Debug(u"STDERR:\n"
              u"%s" % (stderr))
    log.Debug(u"STDOUT:\n"
              u"%s" % (stdout))
    # Look for our files as the last element of a long list line
    return [line.split()[-1] for line in stdout.split(b'\n') if line]