def __eq__(self, other): u""" Two manifests are equal if they contain the same volume infos """ vi_list1 = list(self.volume_info_dict.keys()) vi_list1.sort() vi_list2 = list(other.volume_info_dict.keys()) vi_list2.sort() if vi_list1 != vi_list2: log.Notice( _(u"Manifests not equal because different volume numbers")) return False for i in range(len(vi_list1)): if not vi_list1[i] == vi_list2[i]: log.Notice( _(u"Manifests not equal because volume lists differ")) return False if (self.hostname != other.hostname or self.local_dirname != other.local_dirname): log.Notice( _(u"Manifests not equal because hosts or directories differ")) return False return True
def pre_process_download_batch(self, remote_filenames):
    u"""
    This is called before downloading volumes from this backend
    by the main engine. For PCA, volumes passed as argument need to be
    unsealed. This method is blocking, showing a status at regular
    intervals.
    """
    retry_interval = 60  # status will be shown every 60s
    # remote_filenames are bytes strings
    u_remote_filenames = list(map(util.fsdecode, remote_filenames))
    objs = self.__list_objs(ffilter=lambda x: x[u'name'] in u_remote_filenames)

    # first step: retrieve pca seal status for all required volumes
    # and launch unseal for all sealed files
    one_object_not_unsealed = False
    for o in objs:
        filename = util.fsdecode(o[u'name'])
        # see ovh documentation for policy_retrieval_state definition
        policy_retrieval_state = o[u'policy_retrieval_state']
        log.Info(u"Volume %s. State: %s." % (filename, policy_retrieval_state))
        if policy_retrieval_state == u'sealed':
            log.Notice(u"Launching unseal of volume %s." % filename)
            self.unseal(o[u'name'])
            one_object_not_unsealed = True
        elif policy_retrieval_state == u"unsealing":
            one_object_not_unsealed = True

    # second step: display estimated time for last volume unseal
    # and loop until all volumes are unsealed
    while one_object_not_unsealed:
        one_object_not_unsealed = self.unseal_status(u_remote_filenames)
        time.sleep(retry_interval)
        # might be a good idea to show a progress bar here...
    else:
        # runs once the loop exits normally
        log.Notice(u"All volumes to download are unsealed.")
def unseal_status(self, u_remote_filenames):
    u"""
    Shows unsealing status for input volumes
    """
    one_object_not_unsealed = False
    objs = self.__list_objs(ffilter=lambda x: util.fsdecode(x[u'name']) in u_remote_filenames)
    max_duration = 0
    for o in objs:
        policy_retrieval_state = o[u'policy_retrieval_state']
        filename = util.fsdecode(o[u'name'])
        if policy_retrieval_state == u'sealed':
            log.Notice(u"Error: volume is still in sealed state: %s." % filename)
            log.Notice(u"Launching unseal of volume %s." % filename)
            self.unseal(o[u'name'])
            one_object_not_unsealed = True
        elif policy_retrieval_state == u"unsealing":
            duration = int(o[u'policy_retrieval_delay'])
            log.Info(u"%s available in %d seconds." % (filename, duration))
            if duration > max_duration:
                max_duration = duration
            one_object_not_unsealed = True
    m, s = divmod(max_duration, 60)
    h, m = divmod(m, 60)
    max_duration_eta = u"%dh%02dm%02ds" % (h, m, s)
    log.Notice(u"Need to wait %s before all volumes are unsealed." % max_duration_eta)
    return one_object_not_unsealed
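# A minimal, standalone sketch of the ETA formatting used above: divmod
# splits a second count into hours/minutes/seconds. Illustrative only,
# not part of the backend.
def format_eta(seconds):
    # divmod(a, b) returns (a // b, a % b)
    m, s = divmod(seconds, 60)
    h, m = divmod(m, 60)
    return u"%dh%02dm%02ds" % (h, m, s)

assert format_eta(7322) == u"2h02m02s"  # 7322s = 2h 2m 2s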
def filelist_get_sf(self, filelist_fp, inc_default, filelist_name):
    """Return selection function by reading list of files

    The format of the filelist is documented in the man page.
    filelist_fp should be an (open) file object.
    inc_default should be true if this is an include list,
    false for an exclude list.
    filelist_name is just a string used for logging.

    """
    log.Notice(_("Reading filelist %s") % filelist_name)
    tuple_list, something_excluded = \
        self.filelist_read(filelist_fp, inc_default, filelist_name)
    log.Notice(_("Sorting filelist %s") % filelist_name)
    tuple_list.sort()
    # index kept in a list so the closure below can rebind it
    # (there is no 'nonlocal' in Python 2)
    i = [0]

    def selection_function(path):
        while 1:
            if i[0] >= len(tuple_list):
                return None
            include, move_on = \
                self.filelist_pair_match(path, tuple_list[i[0]])
            if move_on:
                i[0] += 1
                if include is None:
                    continue  # later line may match
            return include

    selection_function.exclude = something_excluded or inc_default == 0
    selection_function.name = "Filelist: " + filelist_name
    return selection_function
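# The `i = [0]` idiom above is a mutable-cell workaround for Python 2
# closure scoping. A standalone illustration of the same pattern:
def make_counter():
    i = [0]  # mutable cell shared with the closure

    def count():
        i[0] += 1  # rebinding a plain local here would fail without 'nonlocal'
        return i[0]
    return count

c = make_counter()
assert c() == 1 and c() == 2  # state persists across calls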
def _delete_list(self, filename_list):
    for filename in filename_list:
        flist = self.imapf(self.conn.search, None, u"(SUBJECT %s)" % filename)
        flist = flist[0].split()
        if len(flist) > 0 and flist[0] != u"":
            self.delete_single_mail(flist[0])
            log.Notice(u"marked %s to be deleted" % filename)
    self.expunge()
    log.Notice(u"IMAP expunged %s files" % len(filename_list))
def delete(self, filename_list):
    assert len(filename_list) > 0
    for filename in filename_list:
        # use flist rather than shadowing the list builtin
        flist = self._imapf(self._conn.search, None, "(SUBJECT %s)" % filename)
        flist = flist[0].split()
        if len(flist) == 0 or flist[0] == "":
            raise Exception("no such mail with subject '%s'" % filename)
        self._delete_single_mail(flist[0])
        log.Notice("marked %s to be deleted" % filename)
    self._expunge()
    # report the number of requested deletions, not the last search result
    log.Notice("IMAP expunged %s files" % len(filename_list))
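# For reference, a rough standalone sketch of the same search / mark /
# expunge sequence using the stdlib imaplib directly. The hostname,
# credentials, and subject below are placeholders, and error handling
# is omitted:
import imaplib

conn = imaplib.IMAP4_SSL(u"imap.example.com")  # hypothetical host
conn.login(u"user", u"password")
conn.select(u"INBOX")
typ, data = conn.search(None, u'(SUBJECT "duplicity-full.vol1")')
for num in data[0].split():
    # mark the message deleted; expunge() below removes it permanently
    conn.store(num, u'+FLAGS', u'\\Deleted')
conn.expunge()
conn.logout()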
def __init__(self, parsed_url):
    duplicity.backend.Backend.__init__(self, parsed_url)

    # we expect an error return, so go low-level and ignore it;
    # initialize first so a failed popen cannot leave these unbound
    fout = ret = None
    try:
        p = os.popen(u"ncftpls -v")
        fout = p.read()
        ret = p.close()
    except Exception:
        pass

    # the expected error is 8 in the high-byte and some output
    if ret != 0x0800 or not fout:
        log.FatalError(u"NcFTP not found: Please install NcFTP version 3.1.9 or later",
                       log.ErrorCode.ftp_ncftp_missing)

    # version is the second word of the first line
    version = fout.split(u'\n')[0].split()[1]
    if version < u"3.1.9":
        log.FatalError(u"NcFTP too old: Duplicity requires NcFTP version 3.1.9, "
                       u"3.2.1 or later. Version 3.2.0 will not work properly.",
                       log.ErrorCode.ftp_ncftp_too_old)
    elif version == u"3.2.0":
        log.Warn(u"NcFTP (ncftpput) version 3.2.0 may fail with duplicity.\n"
                 u"see: http://www.ncftpd.com/ncftp/doc/changelog.html\n"
                 u"If you have trouble, please upgrade to 3.2.1 or later",
                 log.WarningCode.ftp_ncftp_v320)
    log.Notice(u"NcFTP version is %s" % version)

    self.parsed_url = parsed_url
    self.url_string = duplicity.backend.strip_auth_from_url(self.parsed_url)

    # strip ncftp+ prefix
    self.url_string = duplicity.backend.strip_prefix(self.url_string, u'ncftp')

    # This squelches the "file not found" result from ncftpls when
    # the ftp backend looks for a collection that does not exist.
    # version 3.2.2 has error code 5, 1280 is some legacy value
    self.popen_breaks[u'ncftpls'] = [5, 1280]

    # Use an explicit directory name.
    if self.url_string[-1] != u'/':
        self.url_string += u'/'

    self.password = self.get_password()

    if globals.ftp_connection == u'regular':
        self.conn_opt = u'-E'
    else:
        self.conn_opt = u'-F'

    self.tempfile, self.tempname = tempdir.default().mkstemp()
    os.write(self.tempfile, u"host %s\n" % self.parsed_url.hostname)
    os.write(self.tempfile, u"user %s\n" % self.parsed_url.username)
    os.write(self.tempfile, u"pass %s\n" % self.password)
    os.close(self.tempfile)
    self.flags = u"-f %s %s -t %s -o useCLNT=0,useHELP_SITE=0 " % \
        (self.tempname, self.conn_opt, globals.timeout)
    if parsed_url.port is not None and parsed_url.port != 21:
        self.flags += u" -P '%s'" % parsed_url.port
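# Note that `version < u"3.1.9"` above compares strings lexicographically,
# which would misorder a hypothetical u"3.10.0". A tuple-based comparison
# is safer; a minimal sketch (not what the backend currently does):
def version_tuple(v):
    # u"3.1.9" -> (3, 1, 9); assumes a purely numeric dotted version
    return tuple(int(part) for part in v.split(u'.'))

assert version_tuple(u"3.10.0") > version_tuple(u"3.1.9")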
def __init__(self, parsed_url): duplicity.backend.Backend.__init__(self, parsed_url) # we expect an output try: p = os.popen("lftp --version") fout = p.read() ret = p.close() except Exception: pass # there is no output if lftp not found if not fout: log.FatalError("LFTP not found: Please install LFTP.", log.ErrorCode.ftps_lftp_missing) # version is the second word of the second part of the first line version = fout.split('\n')[0].split(' | ')[1].split()[1] log.Notice("LFTP version is %s" % version) self.parsed_url = parsed_url self.url_string = duplicity.backend.strip_auth_from_url( self.parsed_url) # Use an explicit directory name. if self.url_string[-1] != '/': self.url_string += '/' self.password = self.get_password() if globals.ftp_connection == 'regular': self.conn_opt = 'off' else: self.conn_opt = 'on' if parsed_url.port != None and parsed_url.port != 21: self.portflag = " -p '%s'" % (parsed_url.port) else: self.portflag = "" self.tempfile, self.tempname = tempdir.default().mkstemp() os.write(self.tempfile, "set ftp:ssl-allow true\n") os.write(self.tempfile, "set ftp:ssl-protect-data true\n") os.write(self.tempfile, "set ftp:ssl-protect-list true\n") os.write(self.tempfile, "set net:timeout %s\n" % globals.timeout) os.write(self.tempfile, "set net:max-retries %s\n" % globals.num_retries) os.write(self.tempfile, "set ftp:passive-mode %s\n" % self.conn_opt) os.write(self.tempfile, "open %s %s\n" % (self.portflag, self.parsed_url.hostname)) # allow .netrc auth by only setting user/pass when user was actually given if self.parsed_url.username: os.write( self.tempfile, "user %s %s\n" % (self.parsed_url.username, self.password)) os.close(self.tempfile) self.flags = "-f %s" % self.tempname
def request(self, method, path, data=None, redirected=0):
    u"""
    Wraps the connection.request method to retry once if authentication is
    required
    """
    self._close()  # or we get previous request's data or exception
    self.connect()

    quoted_path = urllib.parse.quote(path, u"/:~")

    if self.digest_challenge is not None:
        self.headers[u'Authorization'] = self.get_digest_authorization(path)

    log.Info(_(u"WebDAV %s %s request with headers: %s ") %
             (method, quoted_path, self.headers))
    log.Info(_(u"WebDAV data length: %s ") % len(str(data)))
    self.conn.request(method, quoted_path, data, self.headers)
    response = self.conn.getresponse()
    log.Info(_(u"WebDAV response status %s with reason '%s'.") %
             (response.status, response.reason))
    # resolve redirects and reset url on listing requests
    # (they usually come before everything else)
    if response.status in [301, 302] and method == u'PROPFIND':
        redirect_url = response.getheader(u'location', None)
        response.close()
        if redirect_url:
            log.Notice(_(u"WebDAV redirect to: %s ") %
                       urllib.parse.unquote(redirect_url))
            if redirected > 10:
                raise FatalBackendException(
                    _(u"WebDAV redirected 10 times. Giving up."))
            self.parsed_url = duplicity.backend.ParsedUrl(redirect_url)
            self.directory = self.sanitize_path(self.parsed_url.path)
            return self.request(method, self.directory, data, redirected + 1)
        else:
            raise FatalBackendException(
                _(u"WebDAV missing location header in redirect response."))
    elif response.status == 401:
        response.read()
        response.close()
        self.headers[u'Authorization'] = self.get_authorization(response, quoted_path)
        log.Info(_(u"WebDAV retry request with authentication headers."))
        log.Info(_(u"WebDAV %s %s request2 with headers: %s ") %
                 (method, quoted_path, self.headers))
        log.Info(_(u"WebDAV data length: %s ") % len(str(data)))
        self.conn.request(method, quoted_path, data, self.headers)
        response = self.conn.getresponse()
        log.Info(_(u"WebDAV response2 status %s with reason '%s'.") %
                 (response.status, response.reason))

    return response
def __eq__(self, other): u""" Used in test suite """ if not isinstance(other, VolumeInfo): log.Notice(_(u"Other is not VolumeInfo")) return None if self.volume_number != other.volume_number: log.Notice(_(u"Volume numbers don't match")) return None if self.start_index != other.start_index: log.Notice(_(u"start_indicies don't match")) return None if self.end_index != other.end_index: log.Notice(_(u"end_index don't match")) return None hash_list1 = list(self.hashes.items()) hash_list1.sort() hash_list2 = list(other.hashes.items()) hash_list2.sort() if hash_list1 != hash_list2: log.Notice(_(u"Hashes don't match")) return None return 1
def filelist_globbing_get_sfs(self, filelist_fp, inc_default, list_name): u"""Return list of selection functions by reading fileobj filelist_fp should be an open file object inc_default is true iff this is an include list list_name is just the name of the list, used for logging See the man page on --[include/exclude]-globbing-filelist """ # Internal. Used by ParseArgs. log.Notice(_(u"Reading globbing filelist %s") % list_name) separator = config.null_separator and u"\0" or u"\n" filelist_fp.seek(0) for line in filelist_fp.read().split(separator): line, include = self.filelist_sanitise_line(line, inc_default) if not line: # Skip blanks and comment lines continue yield self.glob_get_sf(line, include)
def _list(self):
    lists = []
    for s in self.__stores:
        config.are_errors_fatal[u'list'] = (False, [])
        flist = s.list()
        log.Notice(_(u"MultiBackend: %s: %d files") %
                   (s.backend.parsed_url.url_string, len(flist)))
        if len(flist) == 0 and duplicity.backend._last_exception:
            log.Warn(_(u"Exception during list of %s: %s" %
                       (s.backend.parsed_url.url_string,
                        util.uexc(duplicity.backend._last_exception))))
            duplicity.backend._last_exception = None
        lists.append(flist)
    # combine the lists into a single flat list w/o duplicates via set:
    result = list({item for sublist in lists for item in sublist})
    log.Log(_(u"MultiBackend: combined list: %s") % (result), log.DEBUG)
    return result
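# Note that the set comprehension above deduplicates but does not preserve
# any ordering. If a stable listing mattered, one could sort instead; a
# standalone sketch with made-up per-store listings:
import itertools

lists = [[b'vol1', b'vol2'], [b'vol2', b'vol3']]
result = sorted(set(itertools.chain.from_iterable(lists)))
assert result == [b'vol1', b'vol2', b'vol3']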
def next(self):
    """Write next file, return filename"""
    if self.finished:
        raise StopIteration
    filename = "%s.%d" % (self.prefix, self.current_index)
    log.Info(_("Starting to write %s") % filename)
    outfp = open(filename, "wb")

    if not self.write_volume(outfp):
        # end of input
        self.finished = 1
        if self.current_index == 1:
            # special case first index
            log.Notice(_("Only one volume required.\n"
                         "Renaming %s to %s") % (filename, self.prefix))
            os.rename(filename, self.prefix)
            return self.prefix
    else:
        self.current_index += 1
    return filename
def filelist_globbing_get_sfs(self, filelist_fp, inc_default, list_name): """Return list of selection functions by reading fileobj filelist_fp should be an open file object inc_default is true iff this is an include list list_name is just the name of the list, used for logging See the man page on --[include/exclude]-globbing-filelist """ log.Notice(_("Reading globbing filelist %s") % list_name) separator = globals.null_separator and "\0" or "\n" for line in filelist_fp.read().split(separator): if not line: # skip blanks continue if line[0] == "#": # skip comments continue if line[:2] == "+ ": yield self.glob_get_sf(line[2:], 1) elif line[:2] == "- ": yield self.glob_get_sf(line[2:], 0) else: yield self.glob_get_sf(line, inc_default)
def __init__(self, parsed_url): duplicity.backend.Backend.__init__(self, parsed_url) # we expect an output try: p = os.popen("lftp --version") fout = p.read() ret = p.close() except Exception: pass # there is no output if lftp not found if not fout: log.FatalError("LFTP not found: Please install LFTP.", log.ErrorCode.ftps_lftp_missing) # version is the second word of the second part of the first line version = fout.split('\n')[0].split(' | ')[1].split()[1] log.Notice("LFTP version is %s" % version) self.parsed_url = parsed_url # self.url_string = duplicity.backend.strip_auth_from_url(self.parsed_url) # # strip lftp+ prefix # self.url_string = duplicity.backend.strip_prefix(self.url_string, 'lftp') self.scheme = duplicity.backend.strip_prefix(parsed_url.scheme, 'lftp').lower() self.scheme = re.sub('^webdav', 'http', self.scheme) self.url_string = self.scheme + '://' + parsed_url.hostname if parsed_url.port: self.url_string += ":%s" % parsed_url.port self.remote_path = re.sub('^/', '', parsed_url.path) # Fix up an empty remote path if len(self.remote_path) == 0: self.remote_path = '/' # Use an explicit directory name. if self.remote_path[-1] != '/': self.remote_path += '/' self.authflag = '' if self.parsed_url.username: self.username = self.parsed_url.username self.password = self.get_password() self.authflag = "-u '%s,%s'" % (self.username, self.password) if globals.ftp_connection == 'regular': self.conn_opt = 'off' else: self.conn_opt = 'on' # check for cacert file if https self.cacert_file = globals.ssl_cacert_file if self.scheme == 'https' and not globals.ssl_no_check_certificate: cacert_candidates = ["~/.duplicity/cacert.pem", "~/duplicity_cacert.pem", "/etc/duplicity/cacert.pem"] # if not self.cacert_file: for path in cacert_candidates: path = os.path.expanduser(path) if (os.path.isfile(path)): self.cacert_file = path break # still no cacert file, inform user if not self.cacert_file: raise duplicity.errors.FatalBackendException("""For certificate verification a cacert database file is needed in one of these locations: %s Hints: Consult the man page, chapter 'SSL Certificate Verification'. 
Consider using the options --ssl-cacert-file, --ssl-no-check-certificate .""" % ", ".join(cacert_candidates)) self.tempfile, self.tempname = tempdir.default().mkstemp() os.write(self.tempfile, "set ssl:verify-certificate " + ("false" if globals.ssl_no_check_certificate else "true") + "\n") if globals.ssl_cacert_file: os.write(self.tempfile, "set ssl:ca-file '" + globals.ssl_cacert_file + "'\n") if self.parsed_url.scheme == 'ftps': os.write(self.tempfile, "set ftp:ssl-allow true\n") os.write(self.tempfile, "set ftp:ssl-protect-data true\n") os.write(self.tempfile, "set ftp:ssl-protect-list true\n") elif self.parsed_url.scheme == 'ftpes': os.write(self.tempfile, "set ftp:ssl-force on\n") os.write(self.tempfile, "set ftp:ssl-protect-data on\n") os.write(self.tempfile, "set ftp:ssl-protect-list on\n") else: os.write(self.tempfile, "set ftp:ssl-allow false\n") os.write(self.tempfile, "set http:use-propfind true\n") os.write(self.tempfile, "set net:timeout %s\n" % globals.timeout) os.write(self.tempfile, "set net:max-retries %s\n" % globals.num_retries) os.write(self.tempfile, "set ftp:passive-mode %s\n" % self.conn_opt) if log.getverbosity() >= log.DEBUG: os.write(self.tempfile, "debug\n") if self.parsed_url.scheme == 'ftpes': os.write(self.tempfile, "open %s %s\n" % (self.authflag, self.url_string.replace('ftpes','ftp'))) else: os.write(self.tempfile, "open %s %s\n" % (self.authflag, self.url_string)) # os.write(self.tempfile, "open %s %s\n" % (self.portflag, self.parsed_url.hostname)) # allow .netrc auth by only setting user/pass when user was actually given # if self.parsed_url.username: # os.write(self.tempfile, "user %s %s\n" % (self.parsed_url.username, self.password)) os.close(self.tempfile) if log.getverbosity() >= log.DEBUG: f = open(self.tempname, 'r') log.Debug("SETTINGS: \n" "%s" % f.readlines())
def optimize_excludes(self):
    def make_fast_exclude_fn(paths):
        EOP = None  # end-of-path marker; its value is the dir_only flag

        def make_path_tree(paths):
            tree = {}
            for path in paths:
                dir_only = path != '/' and path[-1] == '/'
                t = tree
                for d in path.split('/'):
                    if d == '':
                        continue
                    try:
                        t = t[d]
                    except KeyError:
                        tt = {}
                        t[d] = tt
                        t = tt
                t[EOP] = dir_only
            return tree

        def is_prefixed_by(target, t):
            is_dir = target.isdir()
            for d in target.index:
                try:
                    t = t[d]
                except KeyError:
                    # Exclude if we've exactly matched a prefix
                    return 0 if EOP in t else None
            # Exclude if this was an exact match, and:
            # - the target is a directory
            # - the target is a file, and the match pattern is not 'dir_only'
            return 0 if EOP in t and (not t[EOP] or is_dir) else None

        # bind the tree at definition time via a default argument
        sel_func = lambda path, tree=make_path_tree(paths): is_prefixed_by(path, tree)
        sel_func.exclude = True
        sel_func.name = "Optimized tuple select (%s paths)" % (len(paths),)
        return sel_func

    # optimize runs of adjacent simple excludes
    i0 = 0
    while i0 != len(self.selection_functions):
        numsf = len(self.selection_functions)
        # find the start of a run of tuple-based selection functions
        while i0 != numsf and not hasattr(self.selection_functions[i0], 'tuple'):
            i0 += 1
        if i0 == numsf:
            break
        i1 = i0
        while i1 != numsf and hasattr(self.selection_functions[i1], 'tuple'):
            i1 += 1
        paths = ['/'.join(sel_fun.tuple) + ('/' if sel_fun.match_only_dirs else '')
                 for sel_fun in self.selection_functions[i0:i1]]
        # replace exclusions in [i0, i1) range with an optimized exclusion
        sel_func = make_fast_exclude_fn(paths)
        self.selection_functions[i0:i1] = [sel_func]
        log.Notice(_("Replaced exclusions [%s, %s) with %s") %
                   (i0, i1, sel_func.name))
        i0 += 1
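# A standalone demo of the path trie built by make_path_tree above: each
# path component becomes a nested dict key, and the EOP (None) marker at a
# node records that a pattern ends there (its value is the dir_only flag).
EOP = None

def make_path_tree(paths):
    tree = {}
    for path in paths:
        dir_only = path != '/' and path[-1] == '/'
        t = tree
        for d in path.split('/'):
            if d == '':
                continue
            t = t.setdefault(d, {})
        t[EOP] = dir_only
    return tree

# '/a/b' and '/a/c/' share the 'a' node; '/a/c/' is marked dir_only
assert make_path_tree(['/a/b', '/a/c/']) == \
    {'a': {'b': {EOP: False}, 'c': {EOP: True}}}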
def log_diff(log_string): log_str = _("Difference found:") + u" " + log_string log.Notice(log_str % (util.ufn(self.get_relative_path())))
def initialize_oauth2_session(self):
    u"""Setup or refresh oauth2 session with Amazon Drive"""

    def token_updater(token):
        u"""Stores oauth2 token on disk"""
        try:
            with open(self.OAUTH_TOKEN_PATH, u'w') as f:
                json.dump(token, f)
        except Exception as err:
            log.Error(u'Could not save the OAuth2 token to %s. This means '
                      u'you may need to do the OAuth2 authorization '
                      u'process again soon. Original error: %s' % (
                          self.OAUTH_TOKEN_PATH, err))

    token = None
    try:
        with open(self.OAUTH_TOKEN_PATH) as f:
            token = json.load(f)
    except IOError as err:
        log.Notice(u'Could not load OAuth2 token. '
                   u'Trying to create a new one. (original error: %s)' % err)

    self.http_client = OAuth2Session(
        self.CLIENT_ID,
        scope=self.OAUTH_SCOPE,
        redirect_uri=self.OAUTH_REDIRECT_URL,
        token=token,
        auto_refresh_kwargs={
            u'client_id': self.CLIENT_ID,
            u'client_secret': self.CLIENT_SECRET,
        },
        auto_refresh_url=self.OAUTH_TOKEN_URL,
        token_updater=token_updater)

    if token is not None:
        self.http_client.refresh_token(self.OAUTH_TOKEN_URL)

    endpoints_response = self.http_client.get(self.metadata_url +
                                              u'account/endpoint')
    if endpoints_response.status_code != requests.codes.ok:
        token = None

    if token is None:
        if not sys.stdout.isatty() or not sys.stdin.isatty():
            log.FatalError(u'The OAuth2 token could not be loaded from %s '
                           u'and you are not running duplicity '
                           u'interactively, so duplicity cannot possibly '
                           u'access Amazon Drive.' % self.OAUTH_TOKEN_PATH)
        authorization_url, _ = self.http_client.authorization_url(
            self.OAUTH_AUTHORIZE_URL)

        print(u'')
        print(u'In order to allow duplicity to access Amazon Drive, please '
              u'open the following URL in a browser and copy the URL of the '
              u'page you see after authorization here:')
        print(authorization_url)
        print(u'')

        redirected_to = (input(u'URL of the resulting page: ')
                         .replace(u'http://', u'https://', 1)).strip()

        token = self.http_client.fetch_token(
            self.OAUTH_TOKEN_URL,
            client_secret=self.CLIENT_SECRET,
            authorization_response=redirected_to)

        endpoints_response = self.http_client.get(self.metadata_url +
                                                  u'account/endpoint')
        endpoints_response.raise_for_status()
        token_updater(token)

    urls = endpoints_response.json()
    if u'metadataUrl' not in urls or u'contentUrl' not in urls:
        log.FatalError(u'Could not retrieve endpoint URLs for this account')
    self.metadata_url = urls[u'metadataUrl']
    self.content_url = urls[u'contentUrl']
def __init__(self, parsed_url): duplicity.backend.Backend.__init__(self, parsed_url) # we expect an output try: p = os.popen(u"lftp --version") fout = p.read() ret = p.close() except Exception: pass # there is no output if lftp not found if not fout: log.FatalError(u"LFTP not found: Please install LFTP.", log.ErrorCode.ftps_lftp_missing) # version is the second word of the second part of the first line version = fout.split(u'\n')[0].split(u' | ')[1].split()[1] log.Notice(u"LFTP version is %s" % version) self.parsed_url = parsed_url self.scheme = duplicity.backend.strip_prefix(parsed_url.scheme, u'lftp').lower() self.scheme = re.sub(u'^webdav', u'http', self.scheme) self.url_string = self.scheme + u'://' + parsed_url.hostname if parsed_url.port: self.url_string += u":%s" % parsed_url.port self.remote_path = re.sub(u'^/', u'', parsed_url.path) # Fix up an empty remote path if len(self.remote_path) == 0: self.remote_path = u'/' # Use an explicit directory name. if self.remote_path[-1] != u'/': self.remote_path += u'/' self.authflag = u'' if self.parsed_url.username: self.username = self.parsed_url.username self.password = self.get_password() self.authflag = u"-u '%s,%s'" % (self.username, self.password) if globals.ftp_connection == u'regular': self.conn_opt = u'off' else: self.conn_opt = u'on' # check for cacert file if https self.cacert_file = globals.ssl_cacert_file if self.scheme == u'https' and not globals.ssl_no_check_certificate: cacert_candidates = [ u"~/.duplicity/cacert.pem", u"~/duplicity_cacert.pem", u"/etc/duplicity/cacert.pem" ] # look for a default cacert file if not self.cacert_file: for path in cacert_candidates: path = os.path.expanduser(path) if (os.path.isfile(path)): self.cacert_file = path break # save config into a reusable temp file self.tempfd, self.tempname = tempdir.default().mkstemp() self.tempfile = os.fdopen(self.tempfd, u"w") self.tempfile.write(u"set ssl:verify-certificate " + ( u"false" if globals.ssl_no_check_certificate else u"true") + u"\n") if self.cacert_file: self.tempfile.write(u"set ssl:ca-file " + cmd_quote(self.cacert_file) + u"\n") if globals.ssl_cacert_path: self.tempfile.write(u"set ssl:ca-path " + cmd_quote(globals.ssl_cacert_path) + u"\n") if self.parsed_url.scheme == u'ftps': self.tempfile.write(u"set ftp:ssl-allow true\n") self.tempfile.write(u"set ftp:ssl-protect-data true\n") self.tempfile.write(u"set ftp:ssl-protect-list true\n") elif self.parsed_url.scheme == u'ftpes': self.tempfile.write(u"set ftp:ssl-force on\n") self.tempfile.write(u"set ftp:ssl-protect-data on\n") self.tempfile.write(u"set ftp:ssl-protect-list on\n") else: self.tempfile.write(u"set ftp:ssl-allow false\n") self.tempfile.write(u"set http:use-propfind true\n") self.tempfile.write(u"set net:timeout %s\n" % globals.timeout) self.tempfile.write(u"set net:max-retries %s\n" % globals.num_retries) self.tempfile.write(u"set ftp:passive-mode %s\n" % self.conn_opt) if log.getverbosity() >= log.DEBUG: self.tempfile.write(u"debug\n") if self.parsed_url.scheme == u'ftpes': self.tempfile.write( u"open %s %s\n" % (self.authflag, self.url_string.replace(u'ftpes', u'ftp'))) else: self.tempfile.write(u"open %s %s\n" % (self.authflag, self.url_string)) self.tempfile.close() # print settings in debug mode if log.getverbosity() >= log.DEBUG: f = open(self.tempname, u'r') log.Debug(u"SETTINGS: \n" u"%s" % f.read())
def log_diff(log_string): log_str = _("Difference found:") + " " + log_string log.Notice(log_str % (self.get_relative_path(), ))