def __init__(self, site):
    """Log in to *site* over FTP using credentials from a .netrc file."""
    # Fall back to the Windows "Application Data" location when the
    # default ~/.netrc cannot be read.
    try:
        auth_db = netrc.netrc()
    except IOError:
        homedir = os.path.expanduser('~') + '\\Application Data\\'
        auth_db = netrc.netrc(homedir + '.netrc')
    info = auth_db.authenticators(site)
    if not info:
        raise Exception('Authentication not found in .netrc')
    self.user = info[0]
    self.passwd = info[2]
    self.ftp = FTP(site)
    # If this fails, the login info is incorrect.
    self.ftp.login(user=self.user, passwd=self.passwd)
    self.setCwd(self.webRoot)
    self.local = Local()
def getuser(self):
    """Return the remote user name.

    Lookup order: evaluated 'remoteusereval', then 'remoteuser', then
    the ~/.netrc entry for the host, then /etc/netrc.  Returns None
    when no source yields a user.
    """
    evaluator = self.localeval
    if self.config.has_option(self.getsection(), 'remoteusereval'):
        expr = self.getconf('remoteusereval')
        if expr is not None:
            return evaluator.eval(expr)
    configured = self.getconf('remoteuser')
    if configured is not None:
        return configured
    # netrc fallbacks: the user file tolerates only a missing file,
    # the system file additionally tolerates a permission error.
    for location, ignored_errnos in ((None, (errno.ENOENT,)),
                                     ('/etc/netrc', (errno.ENOENT, errno.EACCES))):
        try:
            if location is None:
                entry = netrc.netrc().authenticators(self.gethost())
            else:
                entry = netrc.netrc(location).authenticators(self.gethost())
        except IOError as err:
            if err.errno not in ignored_errnos:
                raise
        else:
            if entry:
                return entry[0]
def getuser(self):
    """Return the remote user name.

    Lookup order: evaluated 'remoteusereval' (UTF-8 encoded), then
    'remoteuser' (returned verbatim), then ~/.netrc, then /etc/netrc.
    Returns None when no source yields a user.
    """
    if self.config.has_option(self.getsection(), 'remoteusereval'):
        expr = self.getconf('remoteusereval')
        if expr is not None:
            return self.localeval.eval(expr).encode('UTF-8')
    if self.config.has_option(self.getsection(), 'remoteuser'):
        # Assume the configuration file to be UTF-8 encoded so we must not
        # encode this string again.
        configured = self.getconf('remoteuser')
        if configured is not None:
            return configured
    # netrc fallbacks: the user file tolerates only a missing file,
    # the system file additionally tolerates a permission error.
    for location, ignored_errnos in ((None, (errno.ENOENT,)),
                                     ('/etc/netrc', (errno.ENOENT, errno.EACCES))):
        try:
            if location is None:
                entry = netrc.netrc().authenticators(self.gethost())
            else:
                entry = netrc.netrc(location).authenticators(self.gethost())
        except IOError as err:
            if err.errno not in ignored_errnos:
                raise
        else:
            if entry:
                return entry[0]
def get_netrc_login(path, host):
    """Return the netrc entry (login, account, password) for *host*.

    path: optional path to a netrc file; when falsy the default
          location (~/.netrc) is used.
    host: machine name to look up.

    Returns None when *host* has no entry.  Raises Exception with the
    file name and line number when the netrc file cannot be parsed.
    """
    try:
        # Parse the requested file, or the default one when no path given.
        nrc = netrc.netrc(path) if path else netrc.netrc()
    except netrc.NetrcParseError as e:
        # Python 3 compatible 'as' syntax (was 'except X, e').
        raise Exception("%s:%d: %s" % (e.filename, e.lineno, e.msg))
    # The original parsed the file but never used 'host' nor returned
    # anything; return the entry so callers can actually log in.
    return nrc.authenticators(host)
def ftp_connection_info(ftp_host, netrc_file):
    """Return ftp connection info from netrc and optional host address.

    ftp_host: host to use; when falsy, the first machine listed in the
              netrc file is used instead.
    netrc_file: path to the netrc file holding the credentials.

    Returns (ftp_host, {"ftp_user": ..., "ftp_password": ...}).
    """
    # Parse the netrc file once and reuse it for both lookups
    # (the original parsed it twice).
    nrc = netrc(netrc_file)
    if not ftp_host:
        # dict.keys() is not subscriptable on Python 3; take the first
        # host via iteration (file order is preserved).
        ftp_host = next(iter(nrc.hosts))
    logininfo = nrc.authenticators(ftp_host)
    connection_params = {
        "ftp_user": logininfo[0],
        "ftp_password": logininfo[2],
    }
    return ftp_host, connection_params
def _get_netrc_from_path(cls, path): try: return netrc.netrc(path) except IOError: print >> sys.stderr, 'WARNING: Could not read netrc file %s' % path return netrc.netrc(os.devnull) except netrc.NetrcParseError as e: print >> sys.stderr, ('ERROR: Cannot use netrc file %s due to a ' 'parsing error: %s' % (path, e)) return netrc.netrc(os.devnull)
def getpassword(self): """Return the IMAP password for this repository. It tries to get passwords in the following order: 1. evaluate Repository 'remotepasseval' 2. read password from Repository 'remotepass' 3. read password from file specified in Repository 'remotepassfile' 4. read password from ~/.netrc 5. read password from /etc/netrc On success we return the password. If all strategies fail we return None.""" # 1. evaluate Repository 'remotepasseval' passwd = self.getconf('remotepasseval', None) if passwd != None: return self.localeval.eval(passwd).encode('UTF-8') # 2. read password from Repository 'remotepass' password = self.getconf('remotepass', None) if password != None: # Assume the configuration file to be UTF-8 encoded so we must not # encode this string again. return password # 3. read password from file specified in Repository 'remotepassfile' passfile = self.getconf('remotepassfile', None) if passfile != None: fd = codecs.open(os.path.expanduser(passfile), 'r', 'UTF-8') password = fd.readline().strip() fd.close() return password.encode('UTF-8') # 4. read password from ~/.netrc try: netrcentry = netrc.netrc().authenticators(self.gethost()) except IOError as inst: if inst.errno != errno.ENOENT: raise else: if netrcentry: user = self.getuser() if user == None or user == netrcentry[0]: return netrcentry[2] # 5. read password from /etc/netrc try: netrcentry = netrc.netrc('/etc/netrc').authenticators(self.gethost()) except IOError as inst: if inst.errno not in (errno.ENOENT, errno.EACCES): raise else: if netrcentry: user = self.getuser() if user == None or user == netrcentry[0]: return netrcentry[2] # no strategy yielded a password! return None
def __init__(self, site):
    """Log in to *site* over FTP with credentials read from .netrc."""
    try:
        x = netrc.netrc()
    except IOError:
        # Windows fallback: look in the user's Application Data folder.
        homedir = os.path.expanduser('~') + '\\Application Data\\'
        x = netrc.netrc(homedir + '.netrc')
    info = x.authenticators(site)
    if not info:
        # authenticators() returns None for an unknown machine; fail with
        # a clear message instead of a TypeError on info[0] (this matches
        # the sibling implementation of this constructor in the codebase).
        raise Exception('Authentication not found in .netrc')
    self.user = info[0]
    self.passwd = info[2]
    self.ftp = FTP(site)
    # If this login fails, the credentials in .netrc are incorrect.
    self.ftp.login(user=self.user, passwd=self.passwd)
    self.setCwd(self.webRoot)
    self.local = Local()
def create_rpc_client(self, config):
    """Create and return a transmissionrpc client from *config*.

    Credentials come from a netrc file when config['netrc'] is set,
    otherwise from config['username']/config['password'].  Raises
    PluginError with a human-readable message on connection failure.
    """
    import transmissionrpc
    from transmissionrpc import TransmissionError
    from transmissionrpc import HTTPHandlerError

    user, password = None, None

    if 'netrc' in config:
        try:
            entry = netrc(config['netrc']).authenticators(config['host'])
            if entry is None:
                # No machine entry for this host: the original crashed with
                # a TypeError unpacking None; fall back to anonymous access.
                log.error('netrc: no authenticators for %s' % config['host'])
            else:
                user, account, password = entry
        except IOError as e:
            log.error('netrc: unable to open: %s' % e.filename)
        except NetrcParseError as e:
            log.error('netrc: %s, file: %s, line: %s' % (e.msg, e.filename, e.lineno))
    else:
        if 'username' in config:
            user = config['username']
        if 'password' in config:
            password = config['password']

    try:
        cli = transmissionrpc.Client(config['host'], config['port'], user, password)
    except TransmissionError as e:
        if isinstance(e.original, HTTPHandlerError):
            if e.original.code == 111:
                raise PluginError("Cannot connect to transmission. Is it running?")
            elif e.original.code == 401:
                raise PluginError("Username/password for transmission is incorrect. Cannot connect.")
            elif e.original.code == 110:
                raise PluginError("Cannot connect to transmission: Connection timed out.")
            else:
                # NOTE(review): e.message is a Python 2 idiom — confirm this
                # plugin still targets Python 2 before porting.
                raise PluginError("Error connecting to transmission: %s" % e.original.message)
        else:
            raise PluginError("Error connecting to transmission: %s" % e.message)
    return cli
def _readPasswordNetrc(self, url): """Read the Trac username and password from a .netrc file. Parameters ---------- url : :class:`str` URL of the Trac server Returns ------- :func:`tuple` A tuple containing the username and password. If there is no .netrc file, or if the Trac server is not present, returns ``None``. """ from netrc import netrc try: rc = netrc() except IOError: return None trachost = url[url.index('//')+2:] if trachost.find('/') > 0: foo = hostname.split('/') trachost = foo[0] try: user, account, password = rc.hosts[trachost] except KeyError: return None return (user, password)
def get_netrc_auth(url):
    """Returns the Requests tuple auth for a given url from netrc."""
    try:
        netrc_path = None
        # Keep the first candidate file that exists on disk.
        for filename in NETRC_FILES:
            candidate = os.path.expanduser('~/{0}'.format(filename))
            if netrc_path is None and os.path.exists(candidate):
                netrc_path = candidate

        # Abort early if there isn't one.
        if netrc_path is None:
            return netrc_path

        parsed = urlparse(url)
        # Strip port numbers from netloc
        host = parsed.netloc.split(':')[0]

        try:
            entry = netrc(netrc_path).authenticators(host)
            if entry:
                # Return with login / password; prefer the login field and
                # fall back to the account field when login is empty.
                login_index = 0 if entry[0] else 1
                return (entry[login_index], entry[2])
        except (NetrcParseError, IOError):
            # If there was a parsing error or a permissions issue reading
            # the file, we'll just skip netrc auth.
            pass
    # AppEngine hackiness.
    except (ImportError, AttributeError):
        pass
def get_netrc_auth(url):
    """Return a (user, password) auth tuple for *url* from a netrc file."""
    try:
        from netrc import netrc, NetrcParseError

        netrc_path = None
        for candidate in NETRC_FILES:
            try:
                location = os.path.expanduser('~/{0}'.format(candidate))
            except KeyError:
                # expanduser can fail (KeyError) when the home directory
                # cannot be determined; give up on netrc auth.
                return
            if os.path.exists(location):
                netrc_path = location
                break

        if netrc_path is None:
            return

        parsed = urlparse(url)
        # Drop any :port suffix from the network location.
        host = parsed.netloc.split(':')[0]

        try:
            entry = netrc(netrc_path).authenticators(host)
            if entry:
                # Prefer the login field; fall back to the account field.
                index = 0 if entry[0] else 1
                return (entry[index], entry[2])
        except (NetrcParseError, IOError):
            # Unreadable or malformed file: silently skip netrc auth.
            pass
    except (ImportError, AttributeError):
        # AppEngine environments may lack netrc entirely.
        pass
def nicovideo_download(url, output_dir=".", merge=True, info_only=False):
    """Download a video from nicovideo.jp.

    Credentials are read from the 'nicovideo' machine in ~/.netrc when
    present, otherwise requested interactively.
    """
    import ssl
    ssl_context = request.HTTPSHandler(context=ssl.SSLContext(ssl.PROTOCOL_TLSv1))
    cookie_handler = request.HTTPCookieProcessor()
    opener = request.build_opener(ssl_context, cookie_handler)
    request.install_opener(opener)

    import netrc, getpass
    try:
        info = netrc.netrc().authenticators("nicovideo")
    except FileNotFoundError:
        info = None
    if info is None:
        # NOTE(review): these prompt lines were redacted ('***') in the
        # source; reconstructed as an interactive username/password prompt.
        user = input("User: ")
        password = getpass.getpass("Password: ")
    else:
        user, password = info[0], info[2]
    print("Logging in...")
    nicovideo_login(user, password)

    html = get_html(url)  # necessary!
    title = unicodize(r1(r'<span class="videoHeaderTitle"[^>]*>([^<]+)</span>', html))

    vid = url.split("/")[-1].split("?")[0]
    api_html = get_html("http://www.nicovideo.jp/api/getflv?v=%s" % vid)
    real_url = parse.unquote(r1(r"url=([^&]+)&", api_html))

    type, ext, size = url_info(real_url)
    print_info(site_info, title, type, size)
    if not info_only:
        download_urls([real_url], title, ext, size, output_dir, merge=merge)
def _parse_opts(args): parser = argparse.ArgumentParser( description="Compare two Graphite clusters for a given list of queries.", epilog="Through this module, a \"query\" is \"the name of a query\", a string.") # authentication authentication = parser.add_argument_group("authentication") authentication.add_argument("--netrc-file", metavar="FILENAME", dest="netrc_filename", action="store", help="a netrc file (default: $HOME/$USER/.netrc)", default="") # clusters parameters comparison_params = parser.add_argument_group("comparison parameters") comparison_params.add_argument("--hosts", metavar="HOST", dest="hosts", action="store", nargs=2, help="hosts to compare", required=True) comparison_params.add_argument("--input-file", metavar="FILENAME", dest="input_filename", action="store", help="text file containing one query per line", required=True) comparison_params.add_argument("--from", metavar="FROM_PARAM", dest="from_param", action="store", default="-24hours", help="from param for Graphite API (default: %(default)s)") comparison_params.add_argument("--until", metavar="UNTIL_PARAM", dest="until_param", action="store", default="-2minutes", help="until param for Graphite API (default: %(default)s)") comparison_params.add_argument( "--timeout", metavar="SECONDS", dest="timeout_s", action="store", type=float, help="timeout in seconds used to fetch queries (default: %(default)ss)", default=5) comparison_params.add_argument( "--threshold", metavar="PERCENT", action="store", type=float, default=1, help="percent threshold to evaluate equality between two values (default: %(default)s%%)") # outputs parameters outputs_params = parser.add_argument_group("outputs parameters") outputs_params.add_argument("--output-file", metavar="FILENAME", dest="output_filename", action="store", help="file containing outputs (default: stdout)", default="") outputs_params.add_argument("-v", "--verbosity", action="count", default=0, help="increases verbosity, can be passed multiple times") 
outputs_params.add_argument( "--show-max", metavar="N_LINES", dest="show_max", type=int, default=5, help="truncate the number of shown dissymmetry in outputs (default: %(default)s)") # TODO (t.chataigner) enable several kind of outputs : txt, csv, html... opts = parser.parse_args(args) # compute authentication keys from netrc file opts.auth_keys = [] for host in opts.hosts: auth = netrc.netrc().authenticators(host) if auth is not None: username = auth[0] password = auth[2] opts.auth_keys.append(base64.encodestring(username + ":" + password).replace("\n", "")) else: raise netrc.NetrcParseError("No authenticators for %s" % host) opts.threshold /= 100 return opts
def init_http():
    """Install a global urllib opener with auth, proxy and debug handlers."""
    handlers = [_UserAgentHandler()]

    mgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
    try:
        n = netrc.netrc()
        for host, creds in n.hosts.items():
            # creds is (login, account, password); register the credentials
            # for both the http and the https variant of the host.
            for scheme in ('http', 'https'):
                mgr.add_password(creds[1], '%s://%s/' % (scheme, host),
                                 creds[0], creds[2])
    except netrc.NetrcParseError:
        pass
    except IOError:
        pass
    handlers.append(_BasicAuthHandler(mgr))
    handlers.append(_DigestAuthHandler(mgr))
    if kerberos:
        handlers.append(_KerberosAuthHandler())

    if 'http_proxy' in os.environ:
        url = os.environ['http_proxy']
        handlers.append(urllib.request.ProxyHandler({'http': url, 'https': url}))
    if 'REPO_CURL_VERBOSE' in os.environ:
        handlers.append(urllib.request.HTTPHandler(debuglevel=1))
        handlers.append(urllib.request.HTTPSHandler(debuglevel=1))
    urllib.request.install_opener(urllib.request.build_opener(*handlers))
def on_authentication_required(self, reply, authenticator):
    """Called when a website needs authentication."""
    user, password = None, None
    if not hasattr(reply, "netrc_used") and 'HOME' in os.environ:
        # netrc raises OSError when 'HOME' is missing from os.environ.
        # We don't want to log that, so we skip the lookup altogether.
        reply.netrc_used = True
        try:
            entry = netrc.netrc().authenticators(reply.url().host())
            if entry is not None:
                user, _account, password = entry
        except FileNotFoundError:
            log.misc.debug("No .netrc file found")
        except OSError:
            log.misc.exception("Unable to read the netrc file")
        except netrc.NetrcParseError:
            log.misc.exception("Error when parsing the netrc file")
    if user is None:
        # netrc check failed
        answer = self._ask("Username ({}):".format(authenticator.realm()),
                           mode=usertypes.PromptMode.user_pwd,
                           owner=reply)
        if answer is not None:
            user, password = answer.user, answer.password
    if user is not None:
        authenticator.setUser(user)
        authenticator.setPassword(password)
def __init__(self, key=None, password=None, **kwargs):
    """Create a client, resolving credentials from (in order) the
    arguments, the SMARTFILE_API_KEY / SMARTFILE_API_PASSWORD
    environment variables, and a netrc file ('netrcfile' kwarg or the
    default location).

    Raises APIError when no usable key/password pair can be assembled.
    """
    netrcfile = kwargs.pop('netrcfile', None)
    super(BasicClient, self).__init__(**kwargs)
    if key is None:
        key = os.environ.get('SMARTFILE_API_KEY')
    if password is None:
        password = os.environ.get('SMARTFILE_API_PASSWORD')
    if key is None or password is None:
        # The netrc lookup is best-effort: any failure just leaves the
        # credentials unset.  Narrowed from a bare 'except:', which also
        # swallowed SystemExit/KeyboardInterrupt.
        try:
            rc = netrc(netrcfile)
        except Exception:
            pass
        else:
            urlp = urlparse.urlparse(self.url)
            auth = rc.authenticators(urlp.netloc)
            if auth is not None:
                # auth is (login, account, password); prefer login, then
                # fall back to the account field for the key.
                if key is None:
                    key = auth[0]
                if key is None:
                    key = auth[1]
                if password is None:
                    password = auth[2]
    try:
        self.key, self.password = clean_tokens(key, password)
    except ValueError:
        raise APIError('Please provide an API key and password. Use '
                       'arguments or environment variables.')
def _get_login_info(self): """ Get the the login info as (username, password) It will look in the netrc file using the _NETRC_MACHINE value If there's no info available, return (None, None) """ if self._downloader is None: return (None, None) username = None password = None downloader_params = self._downloader.params # Attempt to use provided username and password or .netrc data if downloader_params.get('username', None) is not None: username = downloader_params['username'] password = downloader_params['password'] elif downloader_params.get('usenetrc', False): try: info = netrc.netrc().authenticators(self._NETRC_MACHINE) if info is not None: username = info[0] password = info[2] else: raise netrc.NetrcParseError('No authenticators for %s' % self._NETRC_MACHINE) except (IOError, netrc.NetrcParseError) as err: self._downloader.report_warning('parsing .netrc: %s' % compat_str(err)) return (username, password)
def install_basic_client(uri='', user='', passwd='', use_netrc=True):
    """Install a global urllib2 opener with HTTP Basic auth and cookies.

    Credentials come from the user's .netrc file (unless use_netrc is
    False), plus the optional explicit uri/user/passwd triple.  Also
    replaces pydap's http.request with a cookie/auth-aware version.
    """
    # Create special opener with support for Cookies
    cj = cookielib.CookieJar()

    # Create the password manager and load it with the credentials.
    pwMgr = urllib2.HTTPPasswordMgrWithDefaultRealm()

    # Get passwords from the .netrc file unless use_netrc is False
    if use_netrc:
        logins = netrc.netrc()
        accounts = logins.hosts  # a dict of hosts and tuples
        for host, info in accounts.iteritems():
            login, account, password = info
            # log.debug('Host: %s; login: %s; account: %s; password: %s' % (host, login, account, password))
            pwMgr.add_password(None, host, login, password)

    if uri and user and passwd:
        pwMgr.add_password(None, uri, user, passwd)

    opener = urllib2.build_opener(urllib2.HTTPBasicAuthHandler(pwMgr),
                                  urllib2.HTTPCookieProcessor(cj))
    opener.addheaders = [('User-agent', pydap.lib.USER_AGENT)]
    urllib2.install_opener(opener)

    def new_request(url):
        # Bug fix: identity comparison ('is') against a string literal only
        # worked by accident through interning; compare by value instead.
        if url[-1] == '&':
            url = url[0:-1]

        # log.debug('Opening %s (install_basic_client)' % url)
        r = urllib2.urlopen(url)

        resp = r.headers.dict
        resp['status'] = str(r.code)
        data = r.read()

        # When an error is returned, we parse the error message from the
        # server and return it in a ``ClientError`` exception.
        if resp.get("content-description") == "dods_error":
            # Raw string avoids invalid-escape-sequence warnings for \d, \s.
            m = re.search(r'code = (?P<code>\d+);\s*message = "(?P<msg>.*)"',
                          data, re.DOTALL | re.MULTILINE)
            msg = 'Server error %(code)s: "%(msg)s"' % m.groupdict()
            raise ClientError(msg)

        return resp, data

    from pydap.util import http
    http.request = new_request
def setUpClass(cls): """Sets up the gerrit instances in a class-specific temp dir.""" # Create gerrit instance. gerrit_dir = tempfile.mkdtemp() os.chmod(gerrit_dir, 0o700) gi = cls.gerrit_instance = cls._create_gerrit_instance(gerrit_dir) # Set netrc file for http authentication. cls.gerrit_util_netrc_orig = gerrit_util.NETRC gerrit_util.NETRC = netrc.netrc(gi.netrc_file) # gerrit_util.py defaults to using https, but for testing, it's much # simpler to use http connections. cls.gerrit_util_protocol_orig = gerrit_util.GERRIT_PROTOCOL gerrit_util.GERRIT_PROTOCOL = 'http' # Because we communicate with the test server via http, rather than https, # libcurl won't add authentication headers to raw git requests unless the # gerrit server returns 401. That works for pushes, but for read operations # (like git-ls-remote), gerrit will simply omit any ref that requires # authentication. By default gerrit doesn't permit anonymous read access to # refs/meta/config. Override that behavior so tests can access # refs/meta/config if necessary. clone_path = os.path.join(gi.gerrit_dir, 'tmp', 'All-Projects') cls._CloneProject('All-Projects', clone_path) project_config = os.path.join(clone_path, 'project.config') cls.check_call(['git', 'config', '--file', project_config, '--add', 'access.refs/meta/config.read', 'group Anonymous Users']) cls.check_call(['git', 'add', project_config], cwd=clone_path) cls.check_call( ['git', 'commit', '-m', 'Anonyous read for refs/meta/config'], cwd=clone_path) cls.check_call(['git', 'push', 'origin', 'HEAD:refs/meta/config'], cwd=clone_path)
def on_authentication_required(self, reply, authenticator):
    """Called when a website needs authentication."""
    user, password = None, None
    if not hasattr(reply, "netrc_used"):
        # Only consult .netrc once per reply.
        reply.netrc_used = True
        try:
            entry = netrc.netrc().authenticators(reply.url().host())
            if entry is not None:
                # pylint: disable=unpacking-non-sequence
                user, _account, password = entry
        except FileNotFoundError:
            log.misc.debug("No .netrc file found")
        except OSError:
            log.misc.exception("Unable to read the netrc file")
        except netrc.NetrcParseError:
            log.misc.exception("Error when parsing the netrc file")
    if user is None:
        # netrc check failed
        answer = self._ask(
            "Username ({}):".format(authenticator.realm()),
            mode=usertypes.PromptMode.user_pwd, owner=reply)
        if answer is not None:
            user, password = answer.user, answer.password
    if user is not None:
        authenticator.setUser(user)
        authenticator.setPassword(password)
def get_credentials(username=None, password=None):
    """Return (username, password) GitHub credentials.

    Prefers the ~/.netrc entry for GITHUB_API_HOST (after interactive
    confirmation), then falls back to prompting on stdin for whatever
    is still missing.
    """
    try:
        my_netrc = netrc.netrc()
    except Exception:
        # Missing or unreadable ~/.netrc: fall through to prompting.
        pass
    else:
        auth = my_netrc.authenticators(GITHUB_API_HOST)
        if auth:
            response = ''
            while response.lower() not in ('y', 'n'):
                print('Using the following GitHub credentials from '
                      '~/.netrc: {0}/{1}'.format(auth[0], '*' * 8))
                response = input(
                    'Use these credentials (if not you will be prompted '
                    'for new credentials)? [Y/n] ')
            if response.lower() == 'y':
                username = auth[0]
                password = auth[2]
    # NOTE(review): the prompt lines below were redacted ('***') in the
    # source; reconstructed as username+password / password-only prompts.
    if not (username or password):
        print("Enter your GitHub username and password so that API "
              "requests aren't as severely rate-limited...")
        username = raw_input('Username: ')
        password = getpass.getpass('Password: ')
    elif not password:
        print("Enter your GitHub password so that API requests "
              "aren't as severely rate-limited...")
        password = getpass.getpass('Password: ')
    return username, password
def _resolve_credentials(fqdn, login):
    """Look up special forms of credential references."""
    result = login

    if "$" in result:
        result = os.path.expandvars(result)

    if result.startswith("netrc:"):
        # "netrc:<path>" — read the account for fqdn from a netrc file
        # (the default location when no path is given).
        path = result.split(':', 1)[1]
        if path:
            path = os.path.abspath(os.path.expanduser(path))
        accounts = netrc.netrc(path or None)
        account = accounts.authenticators(fqdn)
        if not account or not (account[0] or account[1]):
            raise dputhelper.DputUploadFatalException(
                "Cannot find account for host %s in %s netrc file" % (
                    fqdn, path or "default"))
        # account is (login, account, password)
        user, pwd = account[0] or account[1], account[2] or ""
        result = "%s:%s" % (user, pwd)
    else:
        if result.startswith("file:"):
            # "file:<path>" — the file holds the literal credentials.
            path = os.path.abspath(os.path.expanduser(result.split(':', 1)[1]))
            with closing(io.open(path, 'r', encoding='utf-8')) as handle:
                result = handle.read().strip()
        try:
            user, pwd = result.split(':', 1)
        except ValueError:
            user, pwd = result, ""
        # Log with the password masked out.
        trace("Resolved login credentials to %(user)s:%(pwd)s",
              user=user, pwd='*' * len(pwd))
    return result
def upload_to_dropbox( backend ): md = CondMetaData() # check if the expected input file is there... if not path.exists( dbFileForDropBox ): print 'The input sqlite file has not been produced.' return False # first remove any existing metadata file... if path.exists( '%s.txt' %fileNameForDropBox ): remove( '%s.txt' %fileNameForDropBox ) try: dropBox = upload_popcon.ConditionsUploader(upload_popcon.defaultHostname, upload_popcon.defaultUrlTemplate) # Try to find the netrc entry try: (username, account, password) = netrc.netrc().authenticators(upload_popcon.defaultNetrcHost) except Exception: print 'Netrc entry "DropBox" not found.' return False print 'signing in...' dropBox.signIn(username, password) print 'signed in' ret = True for k,v in md.records().items(): destTag = v.get("destinationTag") inputTag = v.get("sqliteTag") if inputTag == None: inputTag = destTag comment = v.get("comment") metadata = md.dumpMetadataForUpload( inputTag, destTag, comment ) ret &= dropBox.uploadFile(dbFileForDropBox, backend, upload_popcon.defaultTemporaryFile) dropBox.signOut() return ret except upload_popcon.HTTPError as e: print e return False
def _login(self):
    """Authenticate the session against instagram.com using .netrc creds."""
    login, _, password = netrc().hosts['instagram']
    credentials = {'username': login, 'password': password}

    main_page = self._session.get('https://www.instagram.com/')
    anti_rate_limit_sleep()
    # The CSRF token from the anonymous page is required for the login POST.
    csrftoken = main_page.cookies['csrftoken']
    self._update_headers(csrftoken)

    login_result = self._session.post(
        'https://www.instagram.com/accounts/login/ajax/',
        data=credentials)
    anti_rate_limit_sleep()

    # Re-fetch the main page to confirm the session is authenticated.
    main_page_again = self._session.get('https://www.instagram.com/')
    spit(main_page_again.text, 'main-after-login.html')
    anti_rate_limit_sleep()

    if InstaAPI.SHARED_DATA_SUBSTRING not in main_page_again.content:
        _log.error('No line with sharedData in main page response (login)')
        _log.error(main_page_again.content)
        return False

    _log.debug('Logged in')
    save_cookies(self._session, 'cookies.json')
    self._query_hash = self._find_query_hash(main_page_again.text)
    return True
def on_authentication_required(self, reply, authenticator):
    """Called when a website needs authentication."""
    user, password = None, None
    if not hasattr(reply, "netrc_used") and 'HOME' in os.environ:
        # netrc raises OSError when 'HOME' is missing from os.environ.
        # We don't want to log that, so we skip the lookup altogether.
        reply.netrc_used = True
        try:
            net = netrc.netrc(config.get('network', 'netrc-file'))
            entry = net.authenticators(reply.url().host())
            if entry is not None:
                user, _account, password = entry
        except FileNotFoundError:
            log.misc.debug("No .netrc file found")
        except OSError:
            log.misc.exception("Unable to read the netrc file")
        except netrc.NetrcParseError:
            log.misc.exception("Error when parsing the netrc file")
    if user is not None:
        authenticator.setUser(user)
        authenticator.setPassword(password)
    else:
        # No netrc entry: fall back to prompting via the shared handler.
        abort_on = self._get_abort_signals(reply)
        shared.authentication_required(reply.url(), authenticator,
                                       abort_on=abort_on)
def set_parser(self, args=None):
    """Parse CLI options and establish a logged-in session."""
    if args is None:
        args = self.parser.parse_args()
    self.args = args

    # Reuse a previously saved session when one exists.
    if path.exists(cookie_path):
        self.load_cookies()
    # Return early when already logged in.
    if self.is_logged_in():
        return

    mail = args.mail
    password = args.passwd
    # Fetch the username and password from netrc when not given on the CLI.
    if mail is None or password is None:
        try:
            auth = netrc.netrc()
            mail, _, password = auth.authenticators("nicovideo")
        except OSError as e:
            print(e)
            raise LoginFailedException("ログインに失敗しました")

    # Log in and obtain a fresh session.
    self.mail = mail
    self.password = password
    self.web_driver = args.web_driver
    self.login()
def main():
    # Produce a quarterly Markdown report of html5rocks article issues
    # fetched from GitHub, using credentials stored in ~/.netrc.
    h5r_netrc = netrc.netrc()
    (username, account, password) = h5r_netrc.authenticators("html5rocks.com")
    # NOTE(review): only the password field is handed to Github(); it is
    # presumably a personal access token — confirm.
    g = Github(password)
    repo = g.get_repo(repository)
    open_issues = repo.get_issues(state="open")
    closed_issues = repo.get_issues(state="closed")
    # Merge open and closed issues into a single list.
    issues = []
    [issues.append(i) for i in open_issues]
    [issues.append(i) for i in closed_issues]
    today = datetime.today()
    completed_articles, late_articles, due_articles = ParseIssues(issues)
    # NOTE: "quater" typo below is user-visible output; preserved as-is.
    print "HTML5 Rocks Quarter Report for %s" % today.date()
    print "=========================================\n"
    print "Articles due this quater"
    print "------------------------\n"
    if len(due_articles) == 0:
        print "There are no articles due this quarter, either all is good, or something messed up!\n"
    else:
        # Emit a Markdown table, one row per due article.
        print "|Author|Article|Delivery date|Tech Writer|State|"
        print "|------|-------|-------------|-----------|-----|"
        for article in due_articles:
            print "|%s|[%s](%s)|%s|%s|%s" % ((article.assignee or article.user).name, article.title, article.html_url, article.due_on.date(), article.tech_writer, article.state)
def delete_heroku_server(task_name):
    """Tear down the Heroku app that was provisioned for *task_name*."""
    heroku_directory_name = glob.glob(os.path.join(parent_dir, 'heroku-cli-*'))[0]
    heroku_directory_path = os.path.join(parent_dir, heroku_directory_name)
    heroku_executable_path = os.path.join(heroku_directory_path, 'bin', 'heroku')

    # The Heroku login recorded in ~/.netrc identifies this user.
    netrc_path = os.path.join(os.path.expanduser("~"), '.netrc')
    heroku_user_identifier = netrc.netrc(netrc_path).hosts['api.heroku.com'][0]

    # Rebuild the app name used at creation time: user, task, and an md5 of
    # the login, truncated to Heroku's 30-character limit with no trailing
    # dashes.
    digest = hashlib.md5(heroku_user_identifier.encode('utf-8')).hexdigest()
    heroku_app_name = '{}-{}-{}'.format(user_name, task_name, digest)[:30]
    while heroku_app_name[-1] == '-':
        heroku_app_name = heroku_app_name[:-1]

    print("Heroku: Deleting server: {}".format(heroku_app_name))
    subprocess.check_output(shlex.split(
        '{} destroy {} --confirm {}'.format(
            heroku_executable_path, heroku_app_name, heroku_app_name)))
def netrc_from_env():
    """Load a netrc.netrc for the path in $NETRC, or the platform default
    (~/_netrc on Windows, ~/.netrc elsewhere).

    Returns None (after logging a warning) when no usable file exists.
    """
    netrc_obj = None
    netrc_path = os.environ.get('NETRC')
    try:
        if netrc_path is not None:
            netrc_path = Path(netrc_path)
        else:
            home_dir = Path.home()
            if os.name == 'nt':  # pragma: no cover
                netrc_path = home_dir.joinpath('_netrc')
            else:
                netrc_path = home_dir.joinpath('.netrc')

        if netrc_path and netrc_path.is_file():
            try:
                netrc_obj = netrc.netrc(str(netrc_path))
            except (netrc.NetrcParseError, OSError) as e:
                # Fixed log grammar (was ".netrc file parses fail").
                client_logger.warning("failed to parse .netrc file: %s", e)

        if netrc_obj is None:
            # Fixed typo (was "could't").
            client_logger.warning("couldn't find .netrc file")
    except RuntimeError as e:  # pragma: no cover
        # Path.home() raises RuntimeError when the home directory cannot
        # be resolved (was handled with a stray no-op string statement).
        client_logger.warning("couldn't find .netrc file: %s", e)
    return netrc_obj
def get_credentials_from_netrc(server, netrc_file=DEFAULT_NETRC_FILE):
    """Return (username, password) for *server* from *netrc_file*.

    Raises ValueError naming the server when the file holds no entry for
    it — the original failed with an opaque TypeError unpacking None.
    """
    cred = netrc.netrc(os.path.expanduser(netrc_file))
    auth = cred.authenticators(server)
    if auth is None:
        raise ValueError("No netrc entry found for %r in %r" % (server, netrc_file))
    username, _, password = auth
    return username, password
def from_cddis(HOST, username=None, password=None, build=True, timeout=None,
    local=None, hash='', chunk=16384, verbose=False, fid=sys.stdout, mode=0o775):
    """
    Download a file from GSFC CDDIS archive server

    Arguments
    ---------
    HOST: remote https host path split as list

    Keyword arguments
    -----------------
    username: NASA Earthdata username
    password: NASA Earthdata password
    build: Build opener and check Earthdata credentials
    timeout: timeout in seconds for blocking operations
    local: path to local file
    hash: MD5 hash of local file
    chunk: chunk size for transfer encoding
    verbose: print file transfer information
    fid: open file object to print if verbose
    mode: permissions mode of output local file

    Returns
    -------
    remote_buffer: BytesIO representation of file
    """
    #-- use netrc credentials
    if build and not (username or password):
        urs = 'urs.earthdata.nasa.gov'
        username, login, password = netrc.netrc().authenticators(urs)
    #-- build urllib2 opener and check credentials
    if build:
        #-- build urllib2 opener with credentials
        build_opener(username, password)
        #-- check credentials
        check_credentials()
    #-- Encode username/password for request authorization headers
    base64_string = base64.b64encode('{0}:{1}'.format(username, password).encode())
    authorization_header = "Basic {0}".format(base64_string.decode())
    #-- try downloading from https
    try:
        #-- Create and submit request.
        request = urllib2.Request(posixpath.join(*HOST))
        request.add_header("Authorization", authorization_header)
        response = urllib2.urlopen(request, timeout=timeout)
    except Exception as exc:
        #-- chain the original exception (was a bare 'except:' that
        #-- discarded the real cause of the failure)
        raise Exception('Download error from {0}'.format(
            posixpath.join(*HOST))) from exc
    else:
        #-- copy remote file contents to bytesIO object
        remote_buffer = io.BytesIO()
        shutil.copyfileobj(response, remote_buffer, chunk)
        remote_buffer.seek(0)
        #-- save file basename with bytesIO object
        remote_buffer.filename = HOST[-1]
        #-- generate checksum hash for remote file
        remote_hash = hashlib.md5(remote_buffer.getvalue()).hexdigest()
        #-- compare checksums and only write when the local copy is stale
        if local and (hash != remote_hash):
            #-- convert to absolute path
            local = os.path.abspath(local)
            #-- create directory if non-existent
            if not os.access(os.path.dirname(local), os.F_OK):
                os.makedirs(os.path.dirname(local), mode)
            #-- print file information
            if verbose:
                args = (posixpath.join(*HOST), local)
                print('{0} -->\n\t{1}'.format(*args), file=fid)
            #-- store bytes to file using chunked transfer encoding
            remote_buffer.seek(0)
            with open(os.path.expanduser(local), 'wb') as f:
                shutil.copyfileobj(remote_buffer, f, chunk)
            #-- change the permissions mode
            os.chmod(local, mode)
        #-- return the bytesIO object (rewound for the caller)
        remote_buffer.seek(0)
        return remote_buffer
def cddis_list(HOST, username=None, password=None, build=True, timeout=None,
    parser=lxml.etree.HTMLParser(), pattern='', sort=False):
    """
    List a directory on GSFC CDDIS archive server

    Arguments
    ---------
    HOST: remote https host path split as list

    Keyword arguments
    -----------------
    username: NASA Earthdata username
    password: NASA Earthdata password
    build: Build opener and check Earthdata credentials
    timeout: timeout in seconds for blocking operations
    parser: HTML parser for lxml
    pattern: regular expression pattern for reducing list
    sort: sort output list

    Returns
    -------
    colnames: list of column names in a directory
    collastmod: list of last modification times for items in the directory
    """
    #-- use netrc credentials
    if build and not (username or password):
        urs = 'urs.earthdata.nasa.gov'
        username, login, password = netrc.netrc().authenticators(urs)
    #-- build urllib2 opener and check credentials
    if build:
        #-- build urllib2 opener with credentials
        build_opener(username, password)
        #-- check credentials
        check_credentials()
    #-- Encode username/password for request authorization headers
    base64_string = base64.b64encode('{0}:{1}'.format(username, password).encode())
    authorization_header = "Basic {0}".format(base64_string.decode())
    #-- try listing from https
    try:
        #-- Create and submit request.
        request = urllib2.Request(posixpath.join(*HOST))
        request.add_header("Authorization", authorization_header)
        tree = lxml.etree.parse(urllib2.urlopen(request, timeout=timeout), parser)
    except Exception as exc:
        #-- chain the original exception (was a bare 'except:' that
        #-- discarded the real cause of the failure)
        raise Exception('List error from {0}'.format(
            posixpath.join(*HOST))) from exc
    else:
        #-- read and parse request for files (column names and modified times)
        #-- find directories
        colnames = tree.xpath('//div[@class="archiveDir"]/div/a/text()')
        collastmod = [None] * (len(colnames))
        #-- find files
        colnames.extend(tree.xpath('//div[@class="archiveItem"]/div/a/text()'))
        #-- get the Unix timestamp value for a modification time
        collastmod.extend([
            get_unix_time(i[:19], format='%Y:%m:%d %H:%M:%S')
            for i in tree.xpath('//div[@class="archiveItem"]/div/span/text()')
        ])
        #-- reduce using regular expression pattern
        if pattern:
            i = [i for i, f in enumerate(colnames) if re.search(pattern, f)]
            #-- reduce list of column names and last modified times
            colnames = [colnames[indice] for indice in i]
            collastmod = [collastmod[indice] for indice in i]
        #-- sort the list
        if sort:
            i = [i for i, j in sorted(enumerate(colnames), key=lambda i: i[1])]
            #-- sort list of column names and last modified times
            colnames = [colnames[indice] for indice in i]
            collastmod = [collastmod[indice] for indice in i]
        #-- return the list of column names and last modified times
        return (colnames, collastmod)
depsonly = None

# Derive the device name from the product string ("<brand>_<device>"),
# falling back to the raw product when it has no underscore.
try:
    device = product[product.index("_") + 1:]
except Exception:
    device = product

if not depsonly:
    print(
        "Device %s not found. Attempting to retrieve device repository from FrankenRom Github (https://github.com/FrankenRom)."
        % device)

repositories = []

# Optional GitHub basic-auth credentials from ~/.netrc, used to raise API
# rate limits; any failure just means unauthenticated requests.
try:
    authtuple = netrc.netrc().authenticators("api.github.com")
    if authtuple:
        auth_string = ('%s:%s' % (authtuple[0], authtuple[2])).encode()
        # base64.encodestring was removed in Python 3.9; encodebytes is the
        # drop-in replacement (same newline-wrapped output, stripped below).
        githubauth = base64.encodebytes(auth_string).decode().replace('\n', '')
    else:
        githubauth = None
except Exception:
    githubauth = None


def add_auth(githubreq):
    # Attach the Basic auth header when credentials are available.
    if githubauth:
        githubreq.add_header("Authorization", "Basic %s" % githubauth)
def checkstatus(self, fetch, ud, d, try_again=True):
    """Check that a fetch URI is reachable, via an HTTP HEAD request.

    Returns True when the server answers, False when it does not (after
    one retry when try_again is set).  Falls back to GET when the server
    rejects HEAD, and reuses pooled connections when fetch.connection_cache
    is enabled.
    """
    import urllib.request, urllib.error, urllib.parse, socket, http.client
    from urllib.response import addinfourl
    from bb.fetch2 import FetchConnectionCache

    class HTTPConnectionCache(http.client.HTTPConnection):
        if fetch.connection_cache:
            def connect(self):
                """Connect to the host and port specified in __init__."""
                # Reuse a pooled socket when one exists for this host:port.
                sock = fetch.connection_cache.get_connection(self.host, self.port)
                if sock:
                    self.sock = sock
                else:
                    self.sock = socket.create_connection((self.host, self.port),
                                self.timeout, self.source_address)
                    fetch.connection_cache.add_connection(self.host, self.port, self.sock)

                if self._tunnel_host:
                    self._tunnel()

    class CacheHTTPHandler(urllib.request.HTTPHandler):
        def http_open(self, req):
            return self.do_open(HTTPConnectionCache, req)

        def do_open(self, http_class, req):
            """Return an addinfourl object for the request, using http_class.

            http_class must implement the HTTPConnection API from httplib.
            The addinfourl return value is a file-like object.  It also
            has methods and attributes including:
                - info(): return a mimetools.Message object for the headers
                - geturl(): return the original request URL
                - code: HTTP status code
            """
            host = req.host
            if not host:
                # Fixed: previously raised via the misspelled name
                # 'urlllib2', which would itself fail with a NameError.
                raise urllib.error.URLError('no host given')

            h = http_class(host, timeout=req.timeout)  # will parse host:port
            h.set_debuglevel(self._debuglevel)

            headers = dict(req.unredirected_hdrs)
            headers.update(dict((k, v) for k, v in list(req.headers.items())
                           if k not in headers))

            # We want to make an HTTP/1.1 request, but the addinfourl
            # class isn't prepared to deal with a persistent connection.
            # It will try to read all remaining data from the socket,
            # which will block while the server waits for the next request.
            # So make sure the connection gets closed after the (only)
            # request.

            # Don't close connection when connection_cache is enabled,
            if fetch.connection_cache is None:
                headers["Connection"] = "close"
            else:
                headers["Connection"] = "Keep-Alive"  # Works for HTTP/1.0

            headers = dict(
                (name.title(), val) for name, val in list(headers.items()))

            if req._tunnel_host:
                tunnel_headers = {}
                proxy_auth_hdr = "Proxy-Authorization"
                if proxy_auth_hdr in headers:
                    tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr]
                    # Proxy-Authorization should not be sent to origin
                    # server.
                    del headers[proxy_auth_hdr]
                h.set_tunnel(req._tunnel_host, headers=tunnel_headers)

            try:
                h.request(req.get_method(), req.selector, req.data, headers)
            except socket.error as err:  # XXX what error?
                # Don't close connection when cache is enabled.
                if fetch.connection_cache is None:
                    h.close()
                raise urllib.error.URLError(err)
            else:
                try:
                    r = h.getresponse(buffering=True)
                except TypeError:  # buffering kw not supported
                    r = h.getresponse()

            # Pick apart the HTTPResponse object to get the addinfourl
            # object initialized properly.

            # Wrap the HTTPResponse object in socket's file object adapter
            # for Windows.  That adapter calls recv(), so delegate recv()
            # to read().  This weird wrapping allows the returned object to
            # have readline() and readlines() methods.

            # XXX It might be better to extract the read buffering code
            # out of socket._fileobject() and into a base class.
            r.recv = r.read

            # no data, just have to read r.read()
            class fp_dummy(object):
                def read(self):
                    return ""
                def readline(self):
                    return ""
                def close(self):
                    pass

            resp = addinfourl(fp_dummy(), r.msg, req.get_full_url())
            resp.code = r.status
            resp.msg = r.reason

            # Close connection when server request it.
            if fetch.connection_cache is not None:
                if 'Connection' in r.msg and r.msg['Connection'] == 'close':
                    fetch.connection_cache.remove_connection(h.host, h.port)

            return resp

    class HTTPMethodFallback(urllib.request.BaseHandler):
        """Fallback to GET if HEAD is not allowed (405 HTTP error)."""
        def http_error_405(self, req, fp, code, msg, headers):
            fp.read()
            fp.close()

            newheaders = dict((k, v) for k, v in list(req.headers.items())
                              if k.lower() not in ("content-length", "content-type"))
            return self.parent.open(urllib.request.Request(req.get_full_url(),
                                    headers=newheaders,
                                    origin_req_host=req.origin_req_host,
                                    unverifiable=True))

        # Some servers (e.g. GitHub archives, hosted on Amazon S3) return 403
        # Forbidden when they actually mean 405 Method Not Allowed.
        http_error_403 = http_error_405

        # Some servers (e.g. FusionForge) returns 406 Not Acceptable when they
        # actually mean 405 Method Not Allowed.
        http_error_406 = http_error_405

    class FixedHTTPRedirectHandler(urllib.request.HTTPRedirectHandler):
        """
        urllib2.HTTPRedirectHandler resets the method to GET on redirect,
        when we want to follow redirects using the original method.
        """
        def redirect_request(self, req, fp, code, msg, headers, newurl):
            newreq = urllib.request.HTTPRedirectHandler.redirect_request(self, req, fp, code, msg, headers, newurl)
            newreq.get_method = lambda: req.get_method()
            return newreq

    exported_proxies = export_proxies(d)

    handlers = [FixedHTTPRedirectHandler, HTTPMethodFallback]
    # Fixed: previously tested the export_proxies *function* (always truthy)
    # instead of its result, so the ProxyHandler was unconditionally added.
    if exported_proxies:
        handlers.append(urllib.request.ProxyHandler())
    handlers.append(CacheHTTPHandler())
    # XXX: Since Python 2.7.9 ssl cert validation is enabled by default
    # see PEP-0476, this causes verification errors on some https servers
    # so disable by default.
    import ssl
    if hasattr(ssl, '_create_unverified_context'):
        handlers.append(urllib.request.HTTPSHandler(context=ssl._create_unverified_context()))
    opener = urllib.request.build_opener(*handlers)

    try:
        uri = ud.url.split(";")[0]
        r = urllib.request.Request(uri)
        r.get_method = lambda: "HEAD"

        def add_basic_auth(login_str, request):
            '''Adds Basic auth to http request, pass in login:password as string'''
            import base64
            encodeuser = base64.b64encode(login_str.encode('utf-8')).decode("utf-8")
            authheader = "Basic %s" % encodeuser
            # Fixed: use the request argument rather than the closed-over 'r'
            request.add_header("Authorization", authheader)

        if ud.user:
            add_basic_auth(ud.user, r)

        try:
            import netrc, urllib.parse
            n = netrc.netrc()
            # authenticators() returns None for unknown hosts; the TypeError
            # from unpacking it is deliberately caught below (best effort).
            login, unused, password = n.authenticators(urllib.parse.urlparse(uri).hostname)
            add_basic_auth("%s:%s" % (login, password), r)
        except (TypeError, ImportError, IOError, netrc.NetrcParseError):
            pass

        opener.open(r)
    except urllib.error.URLError as e:
        if try_again:
            logger.debug(2, "checkstatus: trying again")
            return self.checkstatus(fetch, ud, d, False)
        else:
            # debug for now to avoid spamming the logs in e.g. remote sstate searches
            logger.debug(2, "checkstatus() urlopen failed: %s" % e)
            return False
    return True
# NOTE(review): this class uses Python 2 'except X, e' syntax, so it targets
# Python 2 only.
class NNTPBase:
    def __init__(self, host, sock, user=None, password=None,
                 readermode=None, usenetrc=True):
        """Initialize an instance.  Arguments:
        - host: hostname connected to
        - sock: already-connected socket or socket-like object
        - user: username to authenticate with
        - password: password to use with username
        - readermode: if true, send 'mode reader' command after
                      connecting.

        readermode is sometimes necessary if you are connecting to an
        NNTP server on the local machine and intend to call
        reader-specific commands, such as `group'.  If you get
        unexpected NNTPPermanentErrors, you might need to set
        readermode.
        """
        self.sock = sock
        # buffered file wrapper used by the response-reading helpers
        self.file = self.sock.makefile('rb')
        self.debugging = 0
        # server greeting, read immediately on connect
        self.welcome = self.getresp()

        # 'mode reader' is sometimes necessary to enable 'reader' mode.
        # However, the order in which 'mode reader' and 'authinfo' need to
        # arrive differs between some NNTP servers. Try to send
        # 'mode reader', and if it fails with an authorization failed
        # error, try again after sending authinfo.
        readermode_afterauth = 0
        if readermode:
            try:
                self.welcome = self.shortcmd('mode reader')
            except NNTPPermanentError:
                # error 500, probably 'not implemented'
                pass
            except NNTPTemporaryError, e:
                if user and e.response[:3] == '480':
                    # Need authorization before 'mode reader'
                    readermode_afterauth = 1
                else:
                    raise

        # If no login/password was specified, try to get them from ~/.netrc
        # Presume that if .netrc has an entry, NNRP authentication is required.
        # Any IOError (e.g. missing ~/.netrc) silently falls back to
        # unauthenticated access.
        try:
            if usenetrc and not user:
                import netrc
                credentials = netrc.netrc()
                auth = credentials.authenticators(host)
                if auth:
                    user = auth[0]
                    password = auth[2]
        except IOError:
            pass

        # Perform NNRP authentication if needed.
        if user:
            resp = self.shortcmd('authinfo user ' + user)
            # 381 means the server wants a password next
            if resp[:3] == '381':
                if not password:
                    raise NNTPReplyError(resp)
                else:
                    resp = self.shortcmd('authinfo pass ' + password)
                    # 281 is 'authentication accepted'
                    if resp[:3] != '281':
                        raise NNTPPermanentError(resp)
        if readermode_afterauth:
            # deferred 'mode reader' now that we are authenticated
            try:
                self.welcome = self.shortcmd('mode reader')
            except NNTPPermanentError:
                # error 500, probably 'not implemented'
                pass
def main():
    """Parse command-line options, connect/login to the FTP server and
    kick off the mirroring via mirrorsubdir().

    Usage: PROG [options] hostname[:port] [remotedir [localdir]]
    Login defaults are taken from ~/.netrc when present; -l/-p/-a
    override them.  Mutates the module-level option globals below.
    """
    global verbose, interactive, mac, rmok, nologin
    try:
        opts, args = getopt.getopt(sys.argv[1:], 'a:bil:mnp:qrs:v')
    except getopt.error as msg:
        usage(msg)
    login = ''
    passwd = ''
    account = ''
    if not args:
        usage('hostname missing')
    host = args[0]
    port = 0
    # allow host:port syntax
    if ':' in host:
        host, port = host.split(':', 1)
        port = int(port)
    # seed credentials from ~/.netrc; a missing or malformed file is ignored
    try:
        auth = netrc.netrc().authenticators(host)
        if auth is not None:
            login, account, passwd = auth
    except (netrc.NetrcParseError, IOError):
        pass
    # command-line options override anything found in .netrc
    for o, a in opts:
        if o == '-l': login = a
        if o == '-p': passwd = a
        if o == '-a': account = a
        if o == '-v': verbose = verbose + 1
        if o == '-q': verbose = 0
        if o == '-i': interactive = 1
        # -m: Macintosh server mode also implies no login and skipping *.o
        if o == '-m': mac = 1; nologin = 1; skippats.append('*.o')
        if o == '-n': nologin = 1
        if o == '-r': rmok = 1
        if o == '-s': skippats.append(a)
    remotedir = ''
    localdir = ''
    if args[1:]: remotedir = args[1]
    if args[2:]: localdir = args[2]
    if args[3:]: usage('too many arguments')
    #
    f = ftplib.FTP()
    if verbose: print("Connecting to '%s%s'..." % (host, (port and ":%d" % port or "")))
    f.connect(host, port)
    if not nologin:
        if verbose:
            print('Logging in as %r...' % (login or 'anonymous'))
        f.login(login, passwd, account)
    if verbose: print('OK.')
    pwd = f.pwd()
    if verbose > 1: print('PWD =', repr(pwd))
    # change into the requested remote directory before mirroring
    if remotedir:
        if verbose > 1: print('cwd(%s)' % repr(remotedir))
        f.cwd(remotedir)
        if verbose > 1: print('OK.')
        pwd = f.pwd()
        if verbose > 1: print('PWD =', repr(pwd))
    #
    mirrorsubdir(f, localdir)
from collections import defaultdict
import codecs
import json
import os
import pystache
import requests
import time
import netrc
from pygithub3 import Github

# input/output file names for the page generator
repos_in = 'repos.json'
index_in = 'index.mustache'
index_out = 'index.html'

# Try to build an authenticated GitHub client from the ~/.netrc entry for
# api.github.com; fall back to an anonymous client on any failure
# (missing file, missing entry — authenticators() returns None and the
# unpacking raises TypeError, which this bare except also absorbs).
auth = netrc.netrc()
try:
    (login, _, password) = auth.authenticators('api.github.com')
    ghclient = Github(login=login, password=password)
    logged_in = True
except:
    ghclient = Github()
    logged_in = False

# NOTE(review): gh_repo is truncated at the end of this chunk.
def gh_repo(name):
    print('Fetching "%s" repo information...' % name)
    # Use the following for development so you do not hammer the GitHub API.
    #return {'name': name, 'html_url': 'http://google.com', 'homepage': 'http://example.com', 'description': 'Description!'}
    if not logged_in:
def __init__(self, destinationFolder, password=None, user=None,
             url="https://e4ftl01.cr.usgs.gov", tiles=None, path="MOLT",
             product="MOD11A1.005", today=None, enddate=None, delta=10,
             jpg=False, debug=False, timeout=30, checkgdal=True):
    """Function to initialize the object.

    destinationFolder: directory where downloaded files are stored
                       (created if missing)
    password/user: download credentials; when both are omitted they are
                   looked up in ~/.netrc (host entry, falling back to
                   urs.earthdata.nasa.gov)
    url: server base url; the scheme selects ftp or http transport
    tiles: comma-separated string, sequence, or None for all tiles
    path/product: remote directory components joined to locate the data
    today/enddate/delta: define the date range to download
    jpg/debug/timeout/checkgdal: behavior switches documented inline below
    """
    # prepare the base url and set the url type (ftp/http)
    if 'ftp://' in url:
        self.url = url.replace('ftp://', '').rstrip('/')
        self.urltype = 'ftp'
    elif 'http://' in url:
        self.url = url
        self.urltype = 'http'
    elif 'https://' in url:
        # https is handled by the same code path as http
        self.url = url
        self.urltype = 'http'
    else:
        raise IOError("The url should contain 'ftp://' or 'http://'")
    # no explicit credentials: fall back to ~/.netrc (requires urlparse)
    if not user and not password and not URLPARSE:
        raise IOError("Please use 'user' and 'password' parameters")
    elif not user and not password and URLPARSE:
        self.domain = urlparse(self.url).hostname
        try:
            nt = netrc.netrc()
        except:
            raise IOError("Please set 'user' and 'password' parameters"
                          ", netrc file does not exist")
        try:
            # netrc hosts entries are (login, account, password) triples
            account = nt.hosts[self.domain]
        except:
            try:
                account = nt.hosts['urs.earthdata.nasa.gov']
            except:
                raise IOError("Please set 'user' and 'password' parameters"
                              ", netrc file does not contain parameter "
                              "for NASA url")
        # user for download
        self.user = account[0]
        # password for download
        self.password = account[2]
    else:
        # user for download
        self.user = user
        # password for download
        self.password = password
    # pre-computed Basic auth header used for every HTTP request
    self.userpwd = "{us}:{pw}".format(us=self.user, pw=self.password)
    userAndPass = b64encode(str.encode(self.userpwd)).decode("ascii")
    self.http_header = {'Authorization': 'Basic %s' % userAndPass}
    # install a global opener handling redirects and cookies
    cookieprocessor = urllib.request.HTTPCookieProcessor()
    opener = urllib.request.build_opener(ModisHTTPRedirectHandler,
                                         cookieprocessor)
    urllib.request.install_opener(opener)
    # the product (product_code.004 or product_cod.005)
    self.product = product
    self.product_code = product.split('.')[0]
    # url directory where data are located
    self.path = urljoin(path, self.product)
    # tiles to downloads
    if isinstance(tiles, str):
        self.tiles = tiles.split(',')
    else:  # tiles are list, tuple, or None
        self.tiles = tiles
    # set destination folder
    if not os.path.isdir(destinationFolder):
        os.makedirs(destinationFolder)
        self.writeFilePath = destinationFolder
    elif os.access(destinationFolder, os.W_OK):
        self.writeFilePath = destinationFolder
    else:
        try:
            os.mkdir(destinationFolder)
            self.writeFilePath = destinationFolder
        except:
            raise Exception("Folder to store downloaded files does not "
                            "exist or is not writeable")
    # return the name of product
    if len(self.path.split('/')) == 2:
        self.product = self.path.split('/')[1]
    elif len(self.path.split('/')) == 3:
        self.product = self.path.split('/')[2]
    # write a file with the name of file to be downloaded
    self.filelist = open(
        os.path.join(self.writeFilePath,
                     'listfile{pro}.txt'.format(pro=self.product)), 'w')
    # set if to download jpgs
    self.jpeg = jpg
    # today, or the last day in the download series chronologically
    self.today = today
    # chronologically the first day in the download series
    self.enday = enddate
    # default number of days to consider if enddate not specified
    self.delta = delta
    # status of tile download
    self.status = True
    # for debug, you can download only xml files
    self.debug = debug
    # for logging
    log_filename = os.path.join(self.writeFilePath,
                                'modis{pro}.log'.format(pro=self.product))
    log_format = '%(asctime)s - %(levelname)s - %(message)s'
    logging.basicConfig(filename=log_filename, level=logging.DEBUG,
                        format=log_format)
    logging.captureWarnings(True)
    # global connection attempt counter
    self.nconnection = 0
    # timeout for HTTP connection before failing (seconds)
    self.timeout = timeout
    # files within the directory where data will be saved
    self.fileInPath = []
    for f in os.listdir(self.writeFilePath):
        if os.path.isfile(os.path.join(self.writeFilePath, f)):
            self.fileInPath.append(f)
    global GDAL
    if not GDAL and checkgdal:
        logging.warning("WARNING: Python GDAL library not found")
    elif GDAL and not checkgdal:
        # GDAL is present but the caller asked not to use it
        GDAL = False
    self.dirData = []
def humanizeFileSize(size):
    """Format a byte count as a human-readable string using binary (IEC)
    units, e.g. 1536 -> '1.500KiB'.  Negative sizes are shown by magnitude."""
    size = abs(size)
    if size == 0:
        return "0B"
    suffixes = ('B', 'KiB', 'MiB', 'GiB', 'TiB', 'PiB', 'EiB', 'ZiB', 'YiB')
    # pick the largest power of 1024 not exceeding the size
    exponent = int(math.log(size, 2) // 10)
    scaled = size / math.pow(1024, exponent)
    return "%.3f%s" % (scaled, suffixes[exponent])


if __name__ == '__main__':
    # we need an active login to test
    import netrc
    try:
        n = netrc.netrc()
        # read .netrc entry for 'machine jottacloud'
        username, account, password = n.authenticators('jottacloud.com')
    except Exception as e:
        # fall back to environment variables when .netrc lookup fails
        logging.exception(e)
        username = os.environ['JOTTACLOUD_USERNAME']
        password = os.environ['JOTTACLOUD_PASSWORD']
    jfs = JFS.JFS(auth=(username, password))
    lite = LiteJFS(username, password)
    # build a 100 KiB random payload in a scratch file
    filesize = 1024 * 10 * 10
    data = os.urandom(filesize)
    testfile = tempfile.NamedTemporaryFile()
    puts(colored.blue('Creating test file.'))
def parse():
    '''
    Read command-line arguments, return a simple configuration for running tests.
    '''
    # NOTE(review): this function mixes Python-2-only constructs
    # (dict.iteritems(), the 'unicode' builtin) with print() calls —
    # it appears to target Python 2; confirm before running on Python 3.
    parser = argparse.ArgumentParser(description='Connect to an available board, flash image(s), and run tests.',
                                     usage='bft [options...]',
                                     formatter_class=argparse.RawDescriptionHelpFormatter,
                                     epilog=HELP_EPILOG)
    parser.add_argument('-a', '--analysis', metavar='', type=str, default=None, help='Only run post processing analysis on logs')
    parser.add_argument('-b', '--board_type', metavar='', type=str, nargs='+', default=None, help='MODEL(s) of board to connect to')
    parser.add_argument('-c', '--config_file', metavar='', type=str, default=boardfarm_config_location, help='JSON config file for boardfarm')
    parser.add_argument('-e', '--extend', metavar='', type=str, default=None, action="append", help='NAME of extra test to run')
    parser.add_argument('-f', '--filter', metavar='', type=str, default=None, action="append", help='Regex filter off arbitrary board parameters')
    parser.add_argument('-g', '--golden', metavar='', type=str, default=[], nargs='+', help='Path to JSON results to compare against (golden master)')
    parser.add_argument('-i', '--inventory', action='store_true', help='List available boards and exit')
    parser.add_argument('-k', '--kernel', metavar='', type=str, default=None, help='URL or file PATH of Kernel image to flash')
    parser.add_argument('-l', '--list_tests', action='store_true', help='List available tests and exit')
    parser.add_argument('-m', '--meta_img_loc', metavar='', type=str, default=None, help='URL or file PATH to meta image to flash')
    parser.add_argument('-n', '--board_names', metavar='', type=str, nargs='+', default=[], help='NAME(s) of boards to run on')
    owrt_tests_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), "results", '')
    parser.add_argument('-o', '--output_dir', metavar='', type=str, default=owrt_tests_dir, help='Directory to output results files too')
    parser.add_argument('-p', '--package', metavar='', type=str, action="append", default=None, help='URL or file PATH of ipk install after boot')
    parser.add_argument('-q', '--feature', metavar='', type=str, default=[], nargs='+', help='Features required for this test run')
    parser.add_argument('-r', '--rootfs', metavar='', type=str, default=None, help='URL or file PATH of Rootfs image to flash')
    parser.add_argument('-s', '--sysupgrade', metavar='', type=str, default=None, help='URL or file PATH to Sysupgrade image')
    parser.add_argument('-t', '--retry', type=int, default=0, help='How many times to retry every test if it fails')
    parser.add_argument('-u', '--uboot', metavar='', type=str, default=None, help=argparse.SUPPRESS)
    parser.add_argument('-v', '--reboot-vms', action="store_true", help='Reboot VMs before starting tests')
    parser.add_argument('-w', '--wan', metavar='', type=str, default='dhcp', help='WAN protocol, dhcp (default) or pppoe')
    parser.add_argument('-x', '--testsuite', metavar='', type=str, default=None, help='NAME of test suite to run')
    parser.add_argument('-y', '--batch', action='store_true', help='Run in unattended mode - do not spawn console on failed test')
    parser.add_argument('-z', '--no-network', action='store_true', help='Skip basic network tests when booting')
    parser.add_argument('--bootargs', metavar='', type=str, default=None, help='bootargs to set or append to default args (board dependant)')
    parser.add_argument('--nfsroot', metavar='', type=str, default=None, help='URL or file PATH of Rootfs image to flash')
    parser.add_argument('--version', action='version', version='%(prog)s {}'.format(library.version), help='show version and exit')
    args = parser.parse_args()

    if args.list_tests:
        import tests
        tests.init(config)
        # Print all classes that are a subclass of TestCase
        for e in dir(tests):
            thing = getattr(tests, e)
            if inspect.isclass(thing) and \
               issubclass(thing, unittest2.TestCase):
                try:
                    print("%20s - %s" % (e, thing.__doc__.split('\n')[0]))
                except:
                    print("%20s -" % e)
        sys.exit(0)

    # Load the boardfarm JSON configuration from a URL or a local file and
    # merge per-location defaults into each board entry.
    try:
        if args.config_file.startswith("http"):
            data = urlopen(args.config_file).read().decode()
        else:
            data = open(args.config_file, 'r').read()
        config.boardfarm_config = json.loads(data)

        if 'locations' in config.boardfarm_config:
            location = config.boardfarm_config['locations']
            del config.boardfarm_config['locations']

            for board in config.boardfarm_config:
                if 'location' in config.boardfarm_config[board]:
                    board_location = config.boardfarm_config[board]['location']
                    if board_location in location:
                        # NOTE(review): iteritems() is Python-2-only
                        for key, value in location[board_location].iteritems():
                            if type(value) == list:
                                config.boardfarm_config[board][key].extend(value)
                            else:
                                config.boardfarm_config[board][key] = value
    except Exception as e:
        print(e)
        print('Unable to access/read Board Farm configuration\n%s' % boardfarm_config_location)
        sys.exit(1)

    # Check if boardfarm configuration is empty
    if not config.boardfarm_config:
        print("ERROR! Boardfarm config at %s is empty, so" % args.config_file)
        print("either all stations are in use or disabled.")
        sys.exit(10)

    # Check if given board type(s) have any overlap with available board types from config
    if args.board_type:
        all_board_types = [config.boardfarm_config[key].get('board_type') for key in config.boardfarm_config]
        if not (set(args.board_type) & set(all_board_types)):
            print("ERROR! You specified board types: %s " % " ".join(args.board_type))
            print("but that is not an existing & available type of board.")
            print("Please choose a board type from:")
            print("\n".join([" * %s" % x for x in set(all_board_types)]))
            sys.exit(10)

    # Check if given board name(s) are present in available boards
    if args.board_names:
        all_board_names = [key for key in config.boardfarm_config if key != "locations"]
        if not (set(args.board_names) & set(all_board_names)):
            print("ERROR! You specified board names: %s " % " ".join(args.board_names))
            print("but that is not an existing & available board.")
            print("Please choose a board name from:")
            print("\n".join([" * %s" % x for x in sorted(all_board_names)]))
            sys.exit(10)

    config.batch = args.batch

    # -i: print a table of matching boards and exit
    if args.inventory:
        print("%11s %15s %5s %25s %25s %s" % ('Name', 'Model', 'Auto', 'LAN', 'WAN', 'Notes'))
        bf = config.boardfarm_config
        for i, b in enumerate(sorted(bf)):
            if args.board_type is None or bf[b].get('board_type') in args.board_type:
                if not args.board_names or b in args.board_names:
                    info = {'name': b,
                            'type': bf[b].get('board_type'),
                            'wlan': bf[b].get('wlan_device') != None,
                            'auto': bf[b].get('available_for_autotests', True),
                            'conn_cmd': bf[b].get('conn_cmd'),
                            'lan_device': bf[b].get('lan_device', ''),
                            'wan_device': bf[b].get('wan_device', ''),
                            'notes': bf[b].get('notes', "")}
                    if not args.filter or (args.filter and filter_boards(bf[b], args.filter)):
                        print("%(name)11s %(type)15s %(auto)5s %(lan_device)25s %(wan_device)25s %(notes)s" % info)
        print("To connect to a board by name:\n ./bft -x connect -n NAME")
        print("To connect to any board of a given model:\n ./bft -x connect -b MODEL")
        sys.exit(0)

    if hasattr(config, 'INSTALL_PKGS') is False:
        config.INSTALL_PKGS = ""

    config.retry = args.retry

    if args.package:
        for pkg in args.package:
            config.INSTALL_PKGS += " %s" % pkg

    config.UBOOT = args.uboot
    config.KERNEL = args.kernel
    config.ROOTFS = args.rootfs
    config.NFSROOT = args.nfsroot
    config.META_BUILD = args.meta_img_loc
    # Quick check to make sure file url/path arguments are reasonable
    for x in (config.UBOOT, config.KERNEL, config.ROOTFS, config.META_BUILD):
        if x is None:
            continue
        if x.startswith('http://') or x.startswith('https://'):
            try:
                def add_basic_auth(login_str, request):
                    '''Adds Basic auth to http request, pass in login:password as string'''
                    import base64
                    encodeuser = base64.b64encode(login_str.encode('utf-8')).decode("utf-8")
                    authheader = "Basic %s" % encodeuser
                    request.add_header("Authorization", authheader)

                import ssl
                # certificate validation deliberately disabled for image hosts
                context = ssl._create_unverified_context()

                req = urllib.Request(x)

                # best effort: attach credentials from ~/.netrc when present;
                # authenticators() returning None raises TypeError, caught below
                try:
                    import netrc, urlparse
                    n = netrc.netrc()
                    login, unused, password = n.authenticators(urlparse.urlparse(x).hostname)
                    add_basic_auth("%s:%s" % (login, password), req)
                except (TypeError, ImportError, IOError, netrc.NetrcParseError):
                    pass

                # If url returns 404 or similar, raise exception
                urlopen(req, timeout=20, context=context)
            except Exception as e:
                print(e)
                print('Error trying to access %s' % x)
                sys.exit(1)
        else:
            if not os.path.isfile(x):
                print("File not found: %s" % x)
                sys.exit(1)

    if args.sysupgrade:
        config.SYSUPGRADE_NEW = args.sysupgrade
    if args.testsuite:
        config.TEST_SUITE = args.testsuite
    else:
        if args.extend:
            # One or more test cases was specified at command-line, just boot first.
            config.TEST_SUITE = "flash"
        else:
            # No test suite or test cases specified, so just boot and interact.
            config.TEST_SUITE = "interact"
    if args.extend:
        config.EXTRA_TESTS = args.extend
        config.EXTRA_TESTS += ["Interact"]

    config.output_dir = os.path.abspath(args.output_dir) + os.sep
    try:
        os.mkdir(config.output_dir)
    except:
        pass

    # -a: run every Analysis subclass over the given log file and exit
    if args.analysis:
        import analysis
        for cstr in dir(analysis):
            c = getattr(analysis, cstr)
            if inspect.isclass(c) and issubclass(c, analysis.Analysis):
                sys.stdout.write("Running analysis class = %s... " % c)
                console_log = open(args.analysis, 'r').read()
                from analysis.analysis import prepare_log
                try:
                    c().analyze(prepare_log(console_log), config.output_dir)
                    print("DONE!")
                except Exception as e:
                    print("FAILED!")
                    traceback.print_exc(file=sys.stdout)
                    continue
        exit(0)

    # Board selection: by type (filtered by names/features) or by explicit names
    if args.board_type:
        library.print_bold("Selecting board from board type = %s" % args.board_type)
        config.BOARD_NAMES = []
        possible_names = config.boardfarm_config
        if args.board_names:
            print("Board names = %s" % args.board_names)
            # Allow selection only from given set of board names
            possible_names = set(config.boardfarm_config) & set(args.board_names)
        for b in possible_names:
            if len(args.board_names) != 1 and \
               'available_for_autotests' in config.boardfarm_config[b] and \
               config.boardfarm_config[b]['available_for_autotests'] == False:
                # Skip this board
                continue
            if args.feature != []:
                if 'feature' not in config.boardfarm_config[b]:
                    continue
                features = config.boardfarm_config[b]['feature']
                if 'devices' in config.boardfarm_config[b]:
                    seen_names = []
                    for d in config.boardfarm_config[b]['devices']:
                        if 'feature' in d:
                            # since we only connect to one type of device
                            # we need to ignore the features on the other ones
                            # even though they should be the same
                            if d['name'] in seen_names:
                                continue
                            seen_names.append(d['name'])
                            # NOTE(review): 'unicode' is Python-2-only
                            if type(d['feature']) is str or type(d['feature']) is unicode:
                                d['feature'] = [d['feature']]
                            features.extend(x for x in d['feature'] if x not in features)
                if type(features) is str or type(features) is unicode:
                    features = [features]
                # every requested feature must be offered by the board
                if set(args.feature) != set(args.feature) & set(features):
                    continue
            for t in args.board_type:
                if config.boardfarm_config[b]['board_type'].lower() == t.lower():
                    if args.filter:
                        if filter_boards(config.boardfarm_config[b], args.filter, b):
                            config.BOARD_NAMES.append(b)
                    else:
                        config.BOARD_NAMES.append(b)
        if not config.BOARD_NAMES:
            print("ERROR! No boards meet selection requirements and have available_for_autotests = True.")
            sys.exit(10)
    else:
        if not args.board_names:
            print("ERROR")
            print("You must specify a board name with the '-n' argument:")
            print("./run-all.py -n 3000")
            print("That same board name must be present in boardfarm configuration.")
            sys.exit(1)
        else:
            config.BOARD_NAMES = args.board_names

    config.WAN_PROTO = args.wan
    config.reboot_vms = args.reboot_vms
    config.setup_device_networking = not args.no_network
    config.bootargs = args.bootargs
    config.golden = args.golden
    config.features = args.feature

    return config
# Abort early if there isn't one. if netrc_path is None: return ri = urlparse(url) # Strip port numbers from netloc. This weird `if...encode`` dance is # used for Python 3.2, which doesn't support unicode literals. splitstr = b':' if isinstance(url, str): splitstr = splitstr.decode('ascii') host = ri.netloc.split(splitstr)[0] try: _netrc = netrc(netrc_path).authenticators(host) if _netrc: # Return with login / password login_i = (0 if _netrc[0] else 1) return (_netrc[login_i], _netrc[2]) except (NetrcParseError, IOError): # If there was a parsing error or a permissions issue reading the file, # we'll just skip netrc auth unless explicitly asked to raise errors. if raise_errors: raise # AppEngine hackiness. except (ImportError, AttributeError): pass
def getpassword(self):
    """Return the IMAP password for this repository.

    It tries to get passwords in the following order:

    1. evaluate Repository 'remotepasseval'
    2. read password from Repository 'remotepass'
    3. read password from file specified in Repository 'remotepassfile'
    4. read password from ~/.netrc
    5. read password from /etc/netrc

    On success we return the password.
    If all strategies fail we return None."""

    def _as_text(value):
        # Callers need a str password: decode bytes, pass str through,
        # and reject anything else as a configuration bug.
        if isinstance(value, bytes):
            return value.decode(encoding='utf-8')
        if isinstance(value, str):
            return value
        raise OfflineImapError("Could not get a right password format for"
                               " repository %s. Type found: %s. "
                               "Please, open a bug." %
                               (self.name, type(value)),
                               OfflineImapError.ERROR.FOLDER)

    # 1. Evaluate Repository 'remotepasseval'.
    expression = self.getconf('remotepasseval', None)
    if expression is not None:
        return _as_text(self.localeval.eval(expression))

    # 2. Read password from Repository 'remotepass'.
    password = self.getconf('remotepass', None)
    if password is not None:
        # Assume the configuration file to be UTF-8 encoded so we must not
        # encode this string again.
        return password

    # 3. Read password from file specified in Repository 'remotepassfile'.
    passfile = self.getconf('remotepassfile', None)
    if passfile is not None:
        with open(os.path.expanduser(passfile), 'r',
                  encoding='utf-8') as file_desc:
            return _as_text(file_desc.readline().strip())

    # 4. Read password from ~/.netrc.
    try:
        netrcentry = netrc.netrc().authenticators(self.gethost())
    except IOError as inst:
        # A missing ~/.netrc is fine; any other failure is a real error.
        if inst.errno != errno.ENOENT:
            raise
    else:
        if netrcentry:
            user = self.getuser()
            if user is None or user == netrcentry[0]:
                return netrcentry[2]

    # 5. Read password from /etc/netrc.
    try:
        netrcentry = netrc.netrc('/etc/netrc')\
            .authenticators(self.gethost())
    except IOError as inst:
        # Tolerate the system file being absent or unreadable.
        if inst.errno not in (errno.ENOENT, errno.EACCES):
            raise
    else:
        if netrcentry:
            user = self.getuser()
            if user is None or user == netrcentry[0]:
                return netrcentry[2]

    # No strategy yielded a password!
    return None
def open_url(url, data=None, headers=None, method=None, use_proxy=True,
             force=False, last_mod_time=None, timeout=10, validate_certs=True,
             url_username=None, url_password=None, http_agent=None,
             force_basic_auth=False, follow_redirects='urllib2',
             client_cert=None, client_key=None, cookies=None):
    '''
    Sends a request via HTTP(S) or FTP using urllib2 (Python2) or urllib (Python3)

    Does not require the module environment

    Credentials are resolved in order: url_username/url_password arguments,
    user:pass embedded in the URL netloc, then the netrc file (path taken
    from the NETRC environment variable, else the default location).
    Returns the open response object from urlopen().
    '''
    handlers = []
    ssl_handler = maybe_add_ssl_handler(url, validate_certs)
    if ssl_handler:
        handlers.append(ssl_handler)

    parsed = generic_urlparse(urlparse(url))
    if parsed.scheme != 'ftp':
        username = url_username

        if headers is None:
            headers = {}

        if username:
            password = url_password
            netloc = parsed.netloc
        elif '@' in parsed.netloc:
            # credentials embedded in the URL: split them off the netloc
            credentials, netloc = parsed.netloc.split('@', 1)
            if ':' in credentials:
                username, password = credentials.split(':', 1)
            else:
                username = credentials
                password = ''

            parsed_list = parsed.as_list()
            parsed_list[1] = netloc

            # reconstruct url without credentials
            url = urlunparse(parsed_list)

        if username and not force_basic_auth:
            passman = urllib_request.HTTPPasswordMgrWithDefaultRealm()

            # this creates a password manager
            passman.add_password(None, netloc, username, password)

            # because we have put None at the start it will always
            # use this username/password combination for urls
            # for which `theurl` is a super-url
            authhandler = urllib_request.HTTPBasicAuthHandler(passman)
            digest_authhandler = urllib_request.HTTPDigestAuthHandler(passman)

            # create the AuthHandler
            handlers.append(authhandler)
            handlers.append(digest_authhandler)
        elif username and force_basic_auth:
            # skip the handshake and send the Authorization header up front
            headers["Authorization"] = basic_auth_header(username, password)
        else:
            # no explicit credentials: fall back to the netrc file
            try:
                rc = netrc.netrc(os.environ.get('NETRC'))
                login = rc.authenticators(parsed.hostname)
            except IOError:
                login = None

            if login:
                username, _, password = login
                if username and password:
                    headers["Authorization"] = basic_auth_header(username, password)

    if not use_proxy:
        # an empty ProxyHandler disables any environment-configured proxy
        proxyhandler = urllib_request.ProxyHandler({})
        handlers.append(proxyhandler)

    if HAS_SSLCONTEXT and not validate_certs:
        # In 2.7.9, the default context validates certificates
        context = SSLContext(ssl.PROTOCOL_SSLv23)
        context.options |= ssl.OP_NO_SSLv2
        context.options |= ssl.OP_NO_SSLv3
        context.verify_mode = ssl.CERT_NONE
        context.check_hostname = False
        handlers.append(HTTPSClientAuthHandler(client_cert=client_cert,
                                               client_key=client_key,
                                               context=context))
    elif client_cert:
        handlers.append(HTTPSClientAuthHandler(client_cert=client_cert,
                                               client_key=client_key))

    # pre-2.6 versions of python cannot use the custom https
    # handler, since the socket class is lacking create_connection.
    # Some python builds lack HTTPS support.
    if hasattr(socket, 'create_connection') and CustomHTTPSHandler:
        handlers.append(CustomHTTPSHandler)

    handlers.append(RedirectHandlerFactory(follow_redirects, validate_certs))

    # add some nicer cookie handling
    if cookies is not None:
        handlers.append(urllib_request.HTTPCookieProcessor(cookies))

    opener = urllib_request.build_opener(*handlers)
    urllib_request.install_opener(opener)

    data = to_bytes(data, nonstring='passthru')
    if method:
        if method.upper() not in ('OPTIONS', 'GET', 'HEAD', 'POST', 'PUT', 'DELETE', 'TRACE', 'CONNECT', 'PATCH'):
            raise ConnectionError('invalid HTTP request method; %s' % method.upper())
        request = RequestWithMethod(url, method.upper(), data)
    else:
        request = urllib_request.Request(url, data)

    # add the custom agent header, to help prevent issues
    # with sites that block the default urllib agent string
    if http_agent:
        request.add_header('User-agent', http_agent)

    # Cache control
    # Either we directly force a cache refresh
    if force:
        request.add_header('cache-control', 'no-cache')
    # or we do it if the original is more recent than our copy
    elif last_mod_time:
        tstamp = last_mod_time.strftime('%a, %d %b %Y %H:%M:%S +0000')
        request.add_header('If-Modified-Since', tstamp)

    # user defined headers now, which may override things we've set above
    if headers:
        if not isinstance(headers, dict):
            raise ValueError("headers provided to fetch_url() must be a dict")
        for header in headers:
            request.add_header(header, headers[header])

    urlopen_args = [request, None]
    if sys.version_info >= (2, 6, 0):
        # urlopen in python prior to 2.6.0 did not
        # have a timeout parameter
        urlopen_args.append(timeout)

    r = urllib_request.urlopen(*urlopen_args)
    return r
def get_credentials(mach):
    """Fetch the stored login for *mach* from the default netrc file.

    The default netrc location ($HOME/.netrc) is used.  If *mach* has no
    entry, ``authenticators`` returns ``None`` and the unpacking below
    raises ``TypeError``.

    Returns:
        tuple: ``(login, password)`` for the requested machine.
    """
    # Look up the (login, account, password) triple; the account field is
    # not needed by callers.
    entry = netrc.netrc().authenticators(mach)
    login, _account, password = entry
    return (login, password)
#!/usr/bin/env python import sys import requests import pandas as pd import psycopg2 from netrc import netrc user, acct, passwd = netrc().authenticators("harris") user, acct, apikey = netrc().authenticators("census") fips = [ 2, 1, 5, 4, 6, 8, 9, 11, 10, 12, 13, 15, 19, 16, 17, 18, 20, 21, 22, 25, 24, 23, 26, 27, 29, 28, 30, 37, 38, 31, 33, 34, 35, 32, 36, 39, 40, 41, 42, 44, 45, 46, 47, 48, 49, 51, 50, 53, 55, 54, 56 ] # fips = [1, 37, 42, 55] apibase = "http://api.census.gov/data/" tracts00 = apibase + "2000/sf1?key=%s&get=P001001&for=tract:*&in=state:{}" % apikey blocks00 = apibase + "2000/sf1?key=%s&get=P001001,P003004,P004002,P005001,P005004,P006002&for=block:*&in=state:{}+county:{}+tract:{}" % apikey tracts10 = apibase + "2010/sf1?key=%s&get=P0010001&for=tract:*&in=state:{}" % apikey blocks10 = apibase + "2010/sf1?key=%s&get=P0010001,P0030003,P0040003,P0100001,P0100004,P0110002&for=block:*&in=state:{}+county:{}+tract:{}" % apikey for y, tr, bl in []: # [[2000, tracts00, blocks00], [2010, tracts10, blocks10]]: with open("block_pop{}.csv".format(y), "w") as out: out.write("s,c,t,b,population,black,hispanic,vap,bvap,hvap\n") for s in sorted(fips):
## added this to /etc/modules, so most likely not necessary here ## testing this next time os.system('modprobe w1-gpio') os.system('modprobe w1-therm') ## setting the correct files of the sensors #device_folder = glob.glob('/sys/bus/w1/devices/10*')[0] #device_file = [device_folder + '/w1_slave']#, device_folder[1] + '/w1_slave' base_dir = '/sys/bus/w1/devices/' device_folder = glob.glob(base_dir + '10*')[0] device_file = device_folder + '/w1_slave' ##connecting to database HOST = '192.168.1.2psql' secrets = netrc.netrc() username, account, password = secrets.authenticators( HOST ) try: conn = psycopg2.connect("dbname='temp' user="******" host='localhost' password="******"connected to database temp!" except: print "I am unable to connect to the database" cur = conn.cursor() ## reading actuall temperature of two sensors def read_temp_raw(): #f1 = open(device_file[0], 'r') #lines1 = f1.readlines() #f1.close()
def main():
    """Command-line entry point for the PO.DAAC sync program.

    Parses command-line options, resolves NASA Earthdata / JPL PO.DAAC
    Drive credentials (from a .netrc file, environment variables or an
    interactive prompt, in that order), builds an authenticated opener,
    and runs ``podaac_grace_sync`` if the credentials check out.
    """
    #-- Read the system arguments listed after the program
    parser = argparse.ArgumentParser(
        description="""Syncs GRACE/GRACE-FO and auxiliary data from the
            NASA JPL PO.DAAC Drive Server.
            Syncs GRACE/GRACE-FO Level-1b dealiasing products (AOD1B).
            Gets the latest technical note (TN) files.
            Gets the monthly GRACE/GRACE-FO newsletters.
            """
    )
    #-- command line parameters
    #-- NASA Earthdata credentials
    parser.add_argument('--user','-U',
        type=str, default=os.environ.get('EARTHDATA_USERNAME'),
        help='Username for NASA Earthdata Login')
    parser.add_argument('--webdav','-W',
        type=str, default=os.environ.get('PODAAC_PASSWORD'),
        help='WebDAV Password for JPL PO.DAAC Drive Login')
    parser.add_argument('--netrc','-N',
        type=lambda p: os.path.abspath(os.path.expanduser(p)),
        default=os.path.join(os.path.expanduser('~'),'.netrc'),
        help='Path to .netrc file for authentication')
    #-- working data directory
    parser.add_argument('--directory','-D',
        type=lambda p: os.path.abspath(os.path.expanduser(p)),
        default=os.getcwd(),
        help='Working data directory')
    #-- GRACE/GRACE-FO processing center
    parser.add_argument('--center','-c',
        metavar='PROC', type=str, nargs='+',
        default=['CSR','GFZ','JPL'], choices=['CSR','GFZ','JPL'],
        help='GRACE/GRACE-FO processing center')
    #-- GRACE/GRACE-FO data release
    parser.add_argument('--release','-r',
        metavar='DREL', type=str, nargs='+',
        default=['RL06'], choices=['RL04','RL05','RL06'],
        help='GRACE/GRACE-FO data release')
    #-- GRACE/GRACE-FO dealiasing products
    parser.add_argument('--aod1b','-a',
        default=False, action='store_true',
        help='Sync GRACE/GRACE-FO Level-1B dealiasing products')
    #-- GRACE/GRACE-FO newsletters
    parser.add_argument('--newsletters','-n',
        default=False, action='store_true',
        help='Sync GRACE/GRACE-FO Newsletters')
    #-- connection timeout
    parser.add_argument('--timeout','-t',
        type=int, default=360,
        help='Timeout in seconds for blocking operations')
    #-- Output log file in form
    #-- PODAAC_sync_2002-04-01.log
    parser.add_argument('--log','-l',
        default=False, action='store_true',
        help='Output log file')
    #-- sync options
    parser.add_argument('--list','-L',
        default=False, action='store_true',
        help='Only print files that could be transferred')
    parser.add_argument('--checksum',
        default=False, action='store_true',
        help='Compare hashes to check for overwriting existing data')
    parser.add_argument('--clobber','-C',
        default=False, action='store_true',
        help='Overwrite existing data in transfer')
    #-- permissions mode of the directories and files synced (number in octal)
    parser.add_argument('--mode','-M',
        type=lambda x: int(x,base=8), default=0o775,
        help='Permission mode of directories and files synced')
    args,_ = parser.parse_known_args()

    #-- JPL PO.DAAC drive hostname
    HOST = 'podaac-tools.jpl.nasa.gov'
    #-- get NASA Earthdata and JPL PO.DAAC drive credentials
    try:
        args.user,_,args.webdav = netrc.netrc(args.netrc).authenticators(HOST)
    except Exception:
        #-- FIX: was a bare ``except:`` which also swallowed SystemExit and
        #-- KeyboardInterrupt; any real failure here (missing .netrc, host
        #-- not listed -> TypeError on unpacking None) falls through to the
        #-- interactive prompts below.
        #-- check that NASA Earthdata credentials were entered
        if not args.user:
            prompt = 'Username for {0}: '.format(HOST)
            args.user = builtins.input(prompt)
        #-- enter WebDAV password securely from command-line
        if not args.webdav:
            prompt = 'Password for {0}@{1}: '.format(args.user,HOST)
            args.webdav = getpass.getpass(prompt)

    #-- build a urllib opener for PO.DAAC Drive
    #-- Add the username and password for NASA Earthdata Login system
    gravity_toolkit.utilities.build_opener(args.user,args.webdav)

    #-- check internet connection before attempting to run program
    #-- check JPL PO.DAAC Drive credentials before attempting to run program
    if gravity_toolkit.utilities.check_credentials():
        podaac_grace_sync(args.directory, args.center, DREL=args.release,
            NEWSLETTERS=args.newsletters, AOD1B=args.aod1b,
            TIMEOUT=args.timeout, LIST=args.list, LOG=args.log,
            CLOBBER=args.clobber, CHECKSUM=args.checksum, MODE=args.mode)
import os import warnings import tqdm import requests import netrc from IPython import embed from mangadap.tests.util import remote_data_file, remote_data_files from mangadap.tests.util import drp_test_version, dap_test_version try: NETRC = netrc.netrc() except Exception as e: NETRC = None warnings.warn('Could not load ~/.netrc file. Attempting to pull from DR17.') HOST = 'data.sdss.org' if NETRC is not None and HOST not in NETRC.hosts: NETRC = None warnings.warn('Host data.sdss.org is not defined in your ~/.netrc file. ' 'Attempting to pull from DR17.') def download_file(remote_root, usr, passwd, local_root, file, overwrite=False): """ Thanks to https://stackoverflow.com/questions/37573483/progress-bar-while-download-file-over-http-with-requests/37573701 """ #Beware of how this is joined! url = f'{remote_root}{file}'
def Execute(self, opt, args):
    """Run the sync command.

    Validates mutually-exclusive option combinations, optionally retrieves
    a manifest from a smart-sync/smart-tag manifest server, fetches project
    data over the network, and finally updates the local work trees.
    Fatal errors print to stderr and terminate via ``sys.exit(1)``.

    Args:
        opt: parsed command-line options for the sync command.
        args: positional arguments selecting which projects to sync.
    """
    if opt.jobs:
        self.jobs = opt.jobs
    if self.jobs > 1:
        # Cap parallelism so each job's file descriptors fit within the
        # process soft limit (keeping a small reserve of 5).
        soft_limit, _ = _rlimit_nofile()
        # NOTE(review): true division yields a float job count here;
        # presumably an integer was intended -- confirm.
        self.jobs = min(self.jobs, (soft_limit - 5) / 3)

    # Reject incompatible option combinations up front.
    if opt.network_only and opt.detach_head:
        print('error: cannot combine -n and -d', file=sys.stderr)
        sys.exit(1)
    if opt.network_only and opt.local_only:
        print('error: cannot combine -n and -l', file=sys.stderr)
        sys.exit(1)
    if opt.manifest_name and opt.smart_sync:
        print('error: cannot combine -m and -s', file=sys.stderr)
        sys.exit(1)
    if opt.manifest_name and opt.smart_tag:
        print('error: cannot combine -m and -t', file=sys.stderr)
        sys.exit(1)
    if opt.manifest_server_username or opt.manifest_server_password:
        if not (opt.smart_sync or opt.smart_tag):
            print('error: -u and -p may only be combined with -s or -t',
                  file=sys.stderr)
            sys.exit(1)
        if None in [opt.manifest_server_username,
                    opt.manifest_server_password]:
            print('error: both -u and -p must be given', file=sys.stderr)
            sys.exit(1)

    if opt.manifest_name:
        self.manifest.Override(opt.manifest_name)
    manifest_name = opt.manifest_name

    if opt.smart_sync or opt.smart_tag:
        if not self.manifest.manifest_server:
            print('error: cannot smart sync: no manifest server defined in '
                  'manifest', file=sys.stderr)
            sys.exit(1)

        manifest_server = self.manifest.manifest_server
        if not opt.quiet:
            print('Using manifest server %s' % manifest_server)

        # If the server URL embeds no credentials, try the explicit -u/-p
        # options first, then fall back to the user's ~/.netrc file.
        if not '@' in manifest_server:
            username = None
            password = None
            if opt.manifest_server_username and opt.manifest_server_password:
                username = opt.manifest_server_username
                password = opt.manifest_server_password
            else:
                try:
                    info = netrc.netrc()
                except IOError:
                    print('.netrc file does not exist or could not be opened',
                          file=sys.stderr)
                else:
                    try:
                        parse_result = urllib.parse.urlparse(manifest_server)
                        if parse_result.hostname:
                            username, _account, password = \
                                info.authenticators(parse_result.hostname)
                    except TypeError:
                        # TypeError is raised when the given hostname is not
                        # present in the .netrc file (authenticators()
                        # returned None and could not be unpacked).
                        print('No credentials found for %s in .netrc'
                              % parse_result.hostname, file=sys.stderr)
                    except netrc.NetrcParseError as e:
                        print('Error parsing .netrc file: %s' % e,
                              file=sys.stderr)

            if (username and password):
                # Splice the credentials into the server URL
                # (scheme://user:pass@host/...).
                manifest_server = manifest_server.replace(
                    '://', '://%s:%s@' % (username, password), 1)

        try:
            server = xmlrpc.client.Server(manifest_server)
            if opt.smart_sync:
                # Ask the server for the approved manifest of the current
                # branch, optionally narrowed by a build target taken from
                # the environment.
                p = self.manifest.manifestProject
                b = p.GetBranch(p.CurrentBranch)
                branch = b.merge
                if branch.startswith(R_HEADS):
                    branch = branch[len(R_HEADS):]

                env = os.environ.copy()
                if 'SYNC_TARGET' in env:
                    target = env['SYNC_TARGET']
                    [success, manifest_str] = server.GetApprovedManifest(
                        branch, target)
                elif 'TARGET_PRODUCT' in env and 'TARGET_BUILD_VARIANT' in env:
                    target = '%s-%s' % (env['TARGET_PRODUCT'],
                                        env['TARGET_BUILD_VARIANT'])
                    [success, manifest_str] = server.GetApprovedManifest(
                        branch, target)
                else:
                    [success, manifest_str] = server.GetApprovedManifest(branch)
            else:
                assert (opt.smart_tag)
                [success, manifest_str] = server.GetManifest(opt.smart_tag)

            if success:
                # Persist the server-provided manifest and reload it so the
                # rest of the sync uses it.
                manifest_name = "smart_sync_override.xml"
                manifest_path = os.path.join(
                    self.manifest.manifestProject.worktree, manifest_name)
                try:
                    f = open(manifest_path, 'w')
                    try:
                        f.write(manifest_str)
                    finally:
                        f.close()
                except IOError:
                    print('error: cannot write manifest to %s' % manifest_path,
                          file=sys.stderr)
                    sys.exit(1)
                self._ReloadManifest(manifest_name)
            else:
                print('error: manifest server RPC call failed: %s'
                      % manifest_str, file=sys.stderr)
                sys.exit(1)
        except (socket.error, IOError, xmlrpc.client.Fault) as e:
            print('error: cannot connect to manifest server %s:\n%s'
                  % (self.manifest.manifest_server, e), file=sys.stderr)
            sys.exit(1)
        except xmlrpc.client.ProtocolError as e:
            print('error: cannot connect to manifest server %s:\n%d %s'
                  % (self.manifest.manifest_server, e.errcode, e.errmsg),
                  file=sys.stderr)
            sys.exit(1)

    # Prepare the repo and manifest projects before fetching anything.
    rp = self.manifest.repoProject
    rp.PreSync()

    mp = self.manifest.manifestProject
    mp.PreSync()

    if opt.repo_upgraded:
        _PostRepoUpgrade(self.manifest, quiet=opt.quiet)

    if not opt.local_only:
        mp.Sync_NetworkHalf(quiet=opt.quiet,
                            current_branch_only=opt.current_branch_only,
                            no_tags=opt.no_tags)

    if mp.HasChanges:
        # The manifest itself changed: apply it locally and reload before
        # deciding what to sync.
        syncbuf = SyncBuffer(mp.config)
        mp.Sync_LocalHalf(syncbuf)
        if not syncbuf.Finish():
            sys.exit(1)
        self._ReloadManifest(manifest_name)
        if opt.jobs is None:
            self.jobs = self.manifest.default.sync_j

    all_projects = self.GetProjects(args,
                                    missing_ok=True,
                                    submodules_ok=opt.fetch_submodules)

    self._fetch_times = _FetchTimes(self.manifest)
    if not opt.local_only:
        to_fetch = []
        now = time.time()
        # Refresh the repo project itself at most once per day.
        if _ONE_DAY_S <= (now - rp.LastFetch):
            to_fetch.append(rp)
        to_fetch.extend(all_projects)
        # Fetch the historically slowest projects first.
        to_fetch.sort(key=self._fetch_times.Get, reverse=True)

        fetched = self._Fetch(to_fetch, opt)
        _PostRepoFetch(rp, opt.no_repo_verify)
        if opt.network_only:
            # bail out now; the rest touches the working tree
            return

        # Iteratively fetch missing and/or nested unregistered submodules
        previously_missing_set = set()
        while True:
            self._ReloadManifest(manifest_name)
            all_projects = self.GetProjects(args,
                                            missing_ok=True,
                                            submodules_ok=opt.fetch_submodules)
            missing = []
            for project in all_projects:
                if project.gitdir not in fetched:
                    missing.append(project)
            if not missing:
                break
            # Stop us from non-stopped fetching actually-missing repos: If
            # the set of missing repos has not changed since the last fetch,
            # we break.
            missing_set = set(p.name for p in missing)
            if previously_missing_set == missing_set:
                break
            previously_missing_set = missing_set
            fetched.update(self._Fetch(missing, opt))

    if self.manifest.IsMirror or self.manifest.IsArchive:
        # bail out now, we have no working tree
        return

    if self.UpdateProjectList():
        sys.exit(1)

    # Check out / update every project's work tree.
    syncbuf = SyncBuffer(mp.config, detach_head=opt.detach_head)
    pm = Progress('Syncing work tree', len(all_projects))
    for project in all_projects:
        pm.update()
        if project.worktree:
            project.Sync_LocalHalf(syncbuf)
    pm.end()
    print(file=sys.stderr)
    if not syncbuf.Finish():
        sys.exit(1)

    # If there's a notice that's supposed to print at the end of the sync,
    # print it now...
    if self.manifest.notice:
        print(self.manifest.notice)
def checkstatus(self, fetch, ud, d, try_again=True):
    """Probe whether ``ud.url`` is reachable.

    Issues a HEAD request (falling back to GET on 405/403 responses)
    through an opener that honors proxies, keeps the original method
    across redirects, and reuses cached sockets when
    ``fetch.connection_cache`` is set.  Basic auth is added from the URL's
    user/password or from the user's netrc file when available.

    Returns True on success; on URLError/ConnectionResetError retries once
    (``try_again``) and then returns False.
    """
    class HTTPConnectionCache(http.client.HTTPConnection):
        # connect() is only overridden when a connection cache is in use;
        # otherwise this class behaves exactly like HTTPConnection.
        if fetch.connection_cache:
            def connect(self):
                """Connect to the host and port specified in __init__."""
                sock = fetch.connection_cache.get_connection(self.host, self.port)
                if sock:
                    self.sock = sock
                else:
                    self.sock = socket.create_connection(
                        (self.host, self.port), self.timeout, self.source_address)
                    fetch.connection_cache.add_connection(
                        self.host, self.port, self.sock)

                if self._tunnel_host:
                    self._tunnel()

    class CacheHTTPHandler(urllib.request.HTTPHandler):
        def http_open(self, req):
            return self.do_open(HTTPConnectionCache, req)

        def do_open(self, http_class, req):
            """Return an addinfourl object for the request, using http_class.

            http_class must implement the HTTPConnection API from httplib.
            The addinfourl return value is a file-like object.  It also
            has methods and attributes including:
                - info(): return a mimetools.Message object for the headers
                - geturl(): return the original request URL
                - code: HTTP status code
            """
            host = req.host
            if not host:
                raise urllib.error.URLError('no host given')

            h = http_class(host, timeout=req.timeout)  # will parse host:port
            h.set_debuglevel(self._debuglevel)

            headers = dict(req.unredirected_hdrs)
            headers.update(dict((k, v) for k, v in list(req.headers.items())
                                if k not in headers))

            # We want to make an HTTP/1.1 request, but the addinfourl
            # class isn't prepared to deal with a persistent connection.
            # It will try to read all remaining data from the socket,
            # which will block while the server waits for the next request.
            # So make sure the connection gets closed after the (only)
            # request.

            # Don't close connection when connection_cache is enabled,
            if fetch.connection_cache is None:
                headers["Connection"] = "close"
            else:
                headers["Connection"] = "Keep-Alive"  # Works for HTTP/1.0

            # Normalize header names to Title-Case.
            headers = dict(
                (name.title(), val) for name, val in list(headers.items()))

            if req._tunnel_host:
                tunnel_headers = {}
                proxy_auth_hdr = "Proxy-Authorization"
                if proxy_auth_hdr in headers:
                    tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr]
                    # Proxy-Authorization should not be sent to origin
                    # server.
                    del headers[proxy_auth_hdr]
                h.set_tunnel(req._tunnel_host, headers=tunnel_headers)

            try:
                h.request(req.get_method(), req.selector, req.data, headers)
            except socket.error as err:  # XXX what error?
                # Don't close connection when cache is enabled.
                # Instead, try to detect connections that are no longer
                # usable (for example, closed unexpectedly) and remove
                # them from the cache.
                if fetch.connection_cache is None:
                    h.close()
                elif isinstance(err, OSError) and err.errno == errno.EBADF:
                    # This happens when the server closes the connection despite the Keep-Alive.
                    # Apparently urllib then uses the file descriptor, expecting it to be
                    # connected, when in reality the connection is already gone.
                    # We let the request fail and expect it to be
                    # tried once more ("try_again" in check_status()),
                    # with the dead connection removed from the cache.
                    # If it still fails, we give up, which can happen for bad
                    # HTTP proxy settings.
                    fetch.connection_cache.remove_connection(h.host, h.port)
                raise urllib.error.URLError(err)
            else:
                r = h.getresponse()

            # Pick apart the HTTPResponse object to get the addinfourl
            # object initialized properly.

            # Wrap the HTTPResponse object in socket's file object adapter
            # for Windows.  That adapter calls recv(), so delegate recv()
            # to read().  This weird wrapping allows the returned object to
            # have readline() and readlines() methods.

            # XXX It might be better to extract the read buffering code
            # out of socket._fileobject() and into a base class.
            r.recv = r.read

            # no data, just have to read
            r.read()

            class fp_dummy(object):
                def read(self):
                    return ""
                def readline(self):
                    return ""
                def close(self):
                    pass
                closed = False

            resp = urllib.response.addinfourl(fp_dummy(), r.msg,
                                              req.get_full_url())
            resp.code = r.status
            resp.msg = r.reason

            # Close connection when server request it.
            if fetch.connection_cache is not None:
                if 'Connection' in r.msg and r.msg['Connection'] == 'close':
                    fetch.connection_cache.remove_connection(h.host, h.port)

            return resp

    class HTTPMethodFallback(urllib.request.BaseHandler):
        """ Fallback to GET if HEAD is not allowed (405 HTTP error) """
        def http_error_405(self, req, fp, code, msg, headers):
            fp.read()
            fp.close()

            if req.get_method() != 'GET':
                # Retry as GET, stripping the body-describing headers.
                newheaders = dict(
                    (k, v) for k, v in list(req.headers.items())
                    if k.lower() not in ("content-length", "content-type"))
                return self.parent.open(
                    urllib.request.Request(req.get_full_url(),
                                           headers=newheaders,
                                           origin_req_host=req.origin_req_host,
                                           unverifiable=True))

            raise urllib.request.HTTPError(req, code, msg, headers, None)

        # Some servers (e.g. GitHub archives, hosted on Amazon S3) return 403
        # Forbidden when they actually mean 405 Method Not Allowed.
        http_error_403 = http_error_405

    class FixedHTTPRedirectHandler(urllib.request.HTTPRedirectHandler):
        """
        urllib2.HTTPRedirectHandler resets the method to GET on redirect,
        when we want to follow redirects using the original method.
        """
        def redirect_request(self, req, fp, code, msg, headers, newurl):
            newreq = urllib.request.HTTPRedirectHandler.redirect_request(
                self, req, fp, code, msg, headers, newurl)
            newreq.get_method = req.get_method
            return newreq

    # We need to update the environment here as both the proxy and HTTPS
    # handlers need variables set. The proxy needs http_proxy and friends to
    # be set, and HTTPSHandler ends up calling into openssl to load the
    # certificates. In buildtools configurations this will be looking at the
    # wrong place for certificates by default: we set SSL_CERT_FILE to the
    # right location in the buildtools environment script but as BitBake
    # prunes the environment this is lost. When binaries are executed
    # runfetchcmd ensures these values are in the environment, but this is
    # pure Python so we need to update the environment.
    #
    # Avoid tramping the environment too much by using bb.utils.environment
    # to scope the changes to the build_opener request, which is when the
    # environment lookups happen.
    newenv = {}
    for name in bb.fetch2.FETCH_EXPORT_VARS:
        value = d.getVar(name)
        if not value:
            origenv = d.getVar("BB_ORIGENV")
            if origenv:
                value = origenv.getVar(name)
        if value:
            newenv[name] = value

    with bb.utils.environment(**newenv):
        import ssl

        if self.check_certs(d):
            context = ssl.create_default_context()
        else:
            context = ssl._create_unverified_context()

        handlers = [FixedHTTPRedirectHandler,
                    HTTPMethodFallback,
                    urllib.request.ProxyHandler(),
                    CacheHTTPHandler(),
                    urllib.request.HTTPSHandler(context=context)]
        opener = urllib.request.build_opener(*handlers)

        try:
            # Strip any ;name=value parameters from the fetcher URL.
            uri = ud.url.split(";")[0]
            r = urllib.request.Request(uri)
            r.get_method = lambda: "HEAD"
            # Some servers (FusionForge, as used on Alioth) require that the
            # optional Accept header is set.
            r.add_header("Accept", "*/*")
            r.add_header("User-Agent", self.user_agent)

            def add_basic_auth(login_str, request):
                '''Adds Basic auth to http request, pass in login:password as string'''
                import base64
                encodeuser = base64.b64encode(
                    login_str.encode('utf-8')).decode("utf-8")
                authheader = "Basic %s" % encodeuser
                r.add_header("Authorization", authheader)

            if ud.user and ud.pswd:
                add_basic_auth(ud.user + ':' + ud.pswd, r)

            try:
                import netrc
                n = netrc.netrc()
                login, unused, password = n.authenticators(
                    urllib.parse.urlparse(uri).hostname)
                add_basic_auth("%s:%s" % (login, password), r)
            except (TypeError, ImportError, IOError, netrc.NetrcParseError):
                # No netrc, no entry for this host (TypeError from unpacking
                # None), or an unreadable file: proceed unauthenticated.
                pass

            with opener.open(r, timeout=30) as response:
                pass
        except urllib.error.URLError as e:
            if try_again:
                logger.debug2("checkstatus: trying again")
                return self.checkstatus(fetch, ud, d, False)
            else:
                # debug for now to avoid spamming the logs in e.g. remote sstate searches
                logger.debug2("checkstatus() urlopen failed: %s" % e)
                return False
        except ConnectionResetError as e:
            if try_again:
                logger.debug2("checkstatus: trying again")
                return self.checkstatus(fetch, ud, d, False)
            else:
                # debug for now to avoid spamming the logs in e.g. remote sstate searches
                logger.debug2("checkstatus() urlopen failed: %s" % e)
                return False

    return True
def check_options(_options):
    """function that checks the given options for coherency.

    Validates authentication, mandatory, proxy and geographic options,
    raising ``Exception`` (with a localized message) on the first
    inconsistency found.  May mutate ``_options`` in place: fills
    user/pwd from the netrc file when missing, and sets the derived
    ``proxy`` and ``extraction_geographic`` flags.
    """
    # Check Mandatory Options on AUTHENTICATION
    if (_options.auth_mode != AUTHENTICATION_MODE_NONE and
            _options.auth_mode != AUTHENTICATION_MODE_BASIC and
            _options.auth_mode != AUTHENTICATION_MODE_CAS):
        raise Exception(utils_messages.get_external_messages()
                        ['motuclient.exception.option.invalid'] %
                        (_options.auth_mode, 'auth-mode', [
                            AUTHENTICATION_MODE_NONE, AUTHENTICATION_MODE_BASIC,
                            AUTHENTICATION_MODE_CAS
                        ]))

    # those following parameters are required
    if _options.motu is None:
        raise Exception(utils_messages.get_external_messages()
                        ['motuclient.exception.option.mandatory'] % 'motu')

    if (_options.auth_mode != AUTHENTICATION_MODE_NONE) and (
            _options.user is None or _options.pwd is None):
        # if authentication mode is set we check both user & password presence
        # best-effort: try to fill the missing credentials from the netrc
        # entry for the motu host (third '/'-separated component of the URL)
        try:
            n = netrc.netrc()
            cred = n.authenticators(_options.motu.split('/')[2])
            if cred is not None:
                _options.user = cred[0]
                _options.pwd = cred[2]
        except Exception as ex:
            log.warn("Unable to read netrc configuration: %s" % ex)
        if _options.user is None:
            raise Exception(utils_messages.get_external_messages()
                            ['motuclient.exception.option.mandatory.user'] %
                            ('user', _options.auth_mode))
        elif _options.pwd is None:
            raise Exception(
                utils_messages.get_external_messages()
                ['motuclient.exception.option.mandatory.password'] %
                ('pwd', _options.user))
    elif (_options.auth_mode == AUTHENTICATION_MODE_NONE and
            _options.user is not None):
        # check that if a user is set, an authentication mode should also be set
        raise Exception(utils_messages.get_external_messages()
                        ['motuclient.exception.option.mandatory.mode'] %
                        (AUTHENTICATION_MODE_NONE, 'auth-mode', _options.user))
    elif (_options.pwd is None and _options.user is not None):
        # check that if a user is set, a password should be set also
        raise Exception(utils_messages.get_external_messages()
                        ['motuclient.exception.option.mandatory.password'] %
                        ('pwd', _options.user))

    if _options.service_id is None:
        raise Exception(utils_messages.get_external_messages()
                        ['motuclient.exception.option.mandatory'] % 'service-id')

    if _options.product_id is None:
        raise Exception(utils_messages.get_external_messages()
                        ['motuclient.exception.option.mandatory'] % 'product-id')

    if _options.out_dir is None:
        raise Exception(utils_messages.get_external_messages()
                        ['motuclient.exception.option.mandatory'] % 'out-dir')

    out_dir = _options.out_dir
    # "console" output bypasses the directory checks below
    if not out_dir.startswith("console"):
        # check directory existence
        if not os.path.exists(out_dir):
            raise Exception(utils_messages.get_external_messages()
                            ['motuclient.exception.option.outdir-notexist'] %
                            out_dir)
        # check whether directory is writable or not
        if not os.access(out_dir, os.W_OK):
            raise Exception(
                utils_messages.get_external_messages()
                ['motuclient.exception.option.outdir-notwritable'] % out_dir)

    if _options.out_name is None:
        raise Exception(utils_messages.get_external_messages()
                        ['motuclient.exception.option.mandatory'] % 'out-name')

    # Check PROXY Options
    _options.proxy = False
    if (_options.proxy_server is not None) and (len(_options.proxy_server) != 0):
        _options.proxy = True
        # check that proxy server is a valid url
        url = _options.proxy_server
        p = re.compile(
            '^(ftp|http|https):\/\/(\w+:{0,1}\w*@)?(\S+)(:[0-9]+)?(\/|\/([\w#!:.?+=&%@!\-\/]))?'
        )
        m = p.match(url)
        if not m:
            raise Exception(utils_messages.get_external_messages()
                            ['motuclient.exception.option.not-url'] %
                            ('proxy-server', url))
        # check that if proxy-user is defined then proxy-pwd shall be also, and reciprocally.
        if (_options.proxy_user is not None) != (_options.proxy_pwd is not None):
            raise Exception(utils_messages.get_external_messages()
                            ['motuclient.exception.option.linked'] %
                            ('proxy-user', 'proxy-name'))

    # Check VERTICAL Options
    # No need

    # Check TEMPORAL Options
    # No need

    """ MOTU-172
    #Check OUTPUT Options
    _options.extraction_output = False
    if _options.outputWritten is not None :
        _options.extraction_output = True
    """

    # Check GEOGRAPHIC Options
    _options.extraction_geographic = False
    if _options.latitude_min is not None or _options.latitude_max is not None or _options.longitude_min is not None or _options.longitude_max is not None:
        _options.extraction_geographic = True
        check_latitude(_options.latitude_min, 'latitude_min')
        check_latitude(_options.latitude_max, 'latitude_max')
        check_coordinate(_options.longitude_min, 'longitude_min')
        check_coordinate(_options.longitude_max, 'longitude_max')
def from_nsidc(HOST, username=None, password=None, build=True, timeout=None,
    local=None, hash='', chunk=16384, verbose=False, fid=sys.stdout,
    mode=0o775):
    """
    Download a file from a NSIDC https server

    Arguments
    ---------
    HOST: remote https host path split as list

    Keyword arguments
    -----------------
    username: NASA Earthdata username
    password: NASA Earthdata password
    build: Build opener and check NASA Earthdata credentials
    timeout: timeout in seconds for blocking operations
    local: path to local file
    hash: MD5 hash of local file
    chunk: chunk size for transfer encoding
    verbose: print file transfer information
    fid: open file object to print if verbose
    mode: permissions mode of output local file

    Returns
    -------
    remote_buffer: BytesIO representation of file
    response_error: notification for response error
    """
    #-- create logger
    loglevel = logging.INFO if verbose else logging.CRITICAL
    logging.basicConfig(stream=fid, level=loglevel)
    #-- use netrc credentials
    if build and not (username or password):
        urs = 'urs.earthdata.nasa.gov'
        username, _account, password = netrc.netrc().authenticators(urs)
    #-- build urllib2 opener and check credentials
    if build:
        #-- build urllib2 opener with credentials
        build_opener(username, password)
        #-- check credentials
        check_credentials()
    #-- verify inputs for remote https host
    if isinstance(HOST, str):
        HOST = url_split(HOST)
    #-- try downloading from https
    try:
        #-- Create and submit request.
        request = urllib2.Request(posixpath.join(*HOST))
        response = urllib2.urlopen(request, timeout=timeout)
    except Exception:
        #-- FIX: was a bare ``except:`` which also caught SystemExit and
        #-- KeyboardInterrupt; keep the best-effort (False, error) contract
        #-- but only for genuine download failures.
        response_error = 'Download error from {0}'.format(
            posixpath.join(*HOST))
        return (False, response_error)
    else:
        #-- copy remote file contents to bytesIO object
        remote_buffer = io.BytesIO()
        shutil.copyfileobj(response, remote_buffer, chunk)
        remote_buffer.seek(0)
        #-- save file basename with bytesIO object
        remote_buffer.filename = HOST[-1]
        #-- generate checksum hash for remote file
        remote_hash = hashlib.md5(remote_buffer.getvalue()).hexdigest()
        #-- compare checksums: only write the local copy when it is
        #-- missing or differs from the remote file
        if local and (hash != remote_hash):
            #-- convert to absolute path
            local = os.path.abspath(local)
            #-- create directory if non-existent
            if not os.access(os.path.dirname(local), os.F_OK):
                os.makedirs(os.path.dirname(local), mode)
            #-- print file information
            args = (posixpath.join(*HOST), local)
            logging.info('{0} -->\n\t{1}'.format(*args))
            #-- store bytes to file using chunked transfer encoding
            remote_buffer.seek(0)
            with open(os.path.expanduser(local), 'wb') as f:
                shutil.copyfileobj(remote_buffer, f, chunk)
            #-- change the permissions mode
            os.chmod(local, mode)
        #-- return the bytesIO object
        remote_buffer.seek(0)
        return (remote_buffer, None)
headers = { "Content-type": "application/x-www-form-urlencoded", "Accept": "text/plain" } params = urlencode({'user': user, 'response': response}) conn.request("POST", '/login_sid.lua', params, headers) r1 = conn.getresponse() response_xml = r1.read() root = ET.fromstring(response_xml) return root.findall('SID')[0].text if __name__ == '__main__': host = '192.168.178.1' nrc = netrc() (user, account, password) = nrc.authenticators(host) fb = FritzBoxWeb(host) fb.login(user, password) print(fb.session_id) conn = fb.connect() conn.request( "GET", '/home/home.lua?sid={session_id}'.format(session_id=fb.session_id)) r = conn.getresponse() print(r.read())
def attempt_login(urs, context=ssl.SSLContext(), password_manager=True,
    get_ca_certs=False, redirect=False, authorization_header=False, **kwargs):
    """
    attempt to build a urllib opener for NASA Earthdata

    Arguments
    ---------
    urs: Earthdata login URS 3 host

    Keyword arguments
    -----------------
    context: SSL context for opener object
    password_manager: create password manager context using default realm
    get_ca_certs: get list of loaded "certification authority" certificates
    redirect: create redirect handler object
    authorization_header: add base64 encoded authorization header to opener
    username: NASA Earthdata username
    password: NASA Earthdata password
    retries: number of retry attempts
    netrc: path to .netrc file for authentication

    Raises
    ------
    RuntimeError: if all retry attempts fail the credential check
    """
    # set default keyword arguments
    kwargs.setdefault('username', os.environ.get('EARTHDATA_USERNAME'))
    kwargs.setdefault('password', os.environ.get('EARTHDATA_PASSWORD'))
    kwargs.setdefault('retries', 5)
    kwargs.setdefault('netrc', os.path.expanduser('~/.netrc'))
    try:
        # only necessary on jupyterhub
        os.chmod(kwargs['netrc'], 0o600)
        # try retrieving credentials from netrc
        username, _, password = netrc.netrc(
            kwargs['netrc']).authenticators(urs)
    except Exception:
        # try retrieving credentials from environmental variables
        username, password = (kwargs['username'], kwargs['password'])
    # if username or password are not available, prompt interactively
    if not username:
        username = builtins.input('Username for {0}: '.format(urs))
    if not password:
        prompt = 'Password for {0}@{1}: '.format(username, urs)
        password = getpass.getpass(prompt=prompt)
    # for each retry
    for retry in range(kwargs['retries']):
        # build an opener for urs with credentials
        opener = build_opener(username, password,
            context=context,
            password_manager=password_manager,
            get_ca_certs=get_ca_certs,
            redirect=redirect,
            authorization_header=authorization_header,
            urs=urs)
        # try logging in by check credentials
        try:
            check_credentials()
        except Exception:
            pass
        else:
            return opener
        # reattempt login with freshly entered credentials
        username = builtins.input('Username for {0}: '.format(urs))
        # FIX: rebuild the prompt with the new username; previously a stale
        # ``prompt`` was reused (and was unbound -- NameError -- whenever the
        # initial password came from netrc or keyword arguments)
        prompt = 'Password for {0}@{1}: '.format(username, urs)
        password = getpass.getpass(prompt=prompt)
    # reached end of available retries
    raise RuntimeError('End of Retries: Check NASA Earthdata credentials')
def nsidc_list(HOST, username=None, password=None, build=True, timeout=None,
        parser=lxml.etree.HTMLParser(), pattern='', sort=False):
    """
    List a directory on NSIDC

    Arguments
    ---------
    HOST: remote https host path split as list

    Keyword arguments
    -----------------
    username: NASA Earthdata username
    password: NASA Earthdata password
    build: Build opener and check NASA Earthdata credentials
    timeout: timeout in seconds for blocking operations
    parser: HTML parser for lxml
    pattern: regular expression pattern for reducing list
    sort: sort output list

    Returns
    -------
    colnames: list of column names in a directory
    collastmod: list of last modification times for items in the directory
    colerror: notification for list error
    """
    #-- fall back to netrc credentials when none were supplied
    if build and not (username or password):
        urs = 'urs.earthdata.nasa.gov'
        username, _, password = netrc.netrc().authenticators(urs)
    #-- build the urllib2 opener and verify the Earthdata credentials
    if build:
        build_opener(username, password)
        check_credentials()
    #-- accept the remote host either as a url string or a split list
    if isinstance(HOST, str):
        HOST = url_split(HOST)
    remote_url = posixpath.join(*HOST)
    #-- attempt to fetch and parse the remote directory listing
    try:
        response = urllib2.urlopen(urllib2.Request(remote_url),
            timeout=timeout)
        tree = lxml.etree.parse(response, parser)
    except (urllib2.HTTPError, urllib2.URLError):
        #-- signal the failure to the caller rather than raising
        return (False, False, 'List error from {0}'.format(remote_url))
    #-- column names and Unix timestamps of the modification times
    colnames = tree.xpath('//td[@class="indexcolname"]//a/@href')
    collastmod = [get_unix_time(ts, format='%Y-%m-%d %H:%M')
        for ts in tree.xpath('//td[@class="indexcollastmod"]/text()')]
    #-- reduce the listing with the regular expression pattern
    if pattern:
        keep = [j for j, name in enumerate(colnames)
            if re.search(pattern, name)]
        colnames = [colnames[j] for j in keep]
        collastmod = [collastmod[j] for j in keep]
    #-- sort both lists by column name
    if sort:
        order = [j for j, _ in sorted(enumerate(colnames),
            key=lambda item: item[1])]
        colnames = [colnames[j] for j in order]
        collastmod = [collastmod[j] for j in order]
    #-- return the list of column names and last modified times
    return (colnames, collastmod, None)
# Création du fichier de sauvegarde qui copie tous les fichiers du répertoire wordpress # Ainsi que le fichier Virtualhost du serveur Apache tar = tarfile.open(dir_backup + nom_backup_file, 'w:gz') tar.add(dir_wordpress) tar.add(dir_site_apache + virtual_host) tar.close() # Récupération de la liste des fichiers de sauvegarde pour serveur local et FTP backup_files = supp_old_backup(backup_type[0], base_name, dir_backup) backup_bases = supp_old_backup(backup_type[1], base_name, dir_backup) # ##### ACTIONS SUR LE SERVEUR FTP ###### # # Récupération des informations de connexion du serveur ftp contenu dans le fichier protégé .netrc try: netrc = netrc.netrc() auth_ftp = netrc.authenticators(ftp_host) except FileNotFoundError: print( 'Warning : Fichier d\'identification FTP non présent. L\'accès au serveur impossible' ) sys.exit(2) # Partie de connexion au serveur FTP et transfert des fichiers de sauvegarde with ftplib.FTP(host=ftp_host, user=auth_ftp[0], passwd=auth_ftp[2]) as ftp: try: print(ftp.getwelcome()) ftp.cwd(dir_ftp) ftp.storbinary('STOR ' + nom_backup_file, open(dir_backup + nom_backup_file, 'rb')) ftp.storbinary('STOR ' + nom_backup_base,
def drive_list(HOST, username=None, password=None, build=True, timeout=None,
        urs='podaac-tools.jpl.nasa.gov', parser=lxml.etree.HTMLParser(),
        pattern='', sort=False):
    """
    List a directory on JPL PO.DAAC or ECCO Drive

    Arguments
    ---------
    HOST: remote https host path split as list

    Keyword arguments
    -----------------
    username: NASA Earthdata username
    password: JPL PO.DAAC Drive WebDAV password
    build: Build opener and check WebDAV credentials
    timeout: timeout in seconds for blocking operations
    urs: JPL PO.DAAC or ECCO login URS 3 host
    parser: HTML parser for lxml
    pattern: regular expression pattern for reducing list
    sort: sort output list

    Returns
    -------
    colnames: list of column names in a directory
    collastmod: list of last modification times for items in the directory
    """
    #-- fall back to netrc credentials when none were supplied
    if build and not (username or password):
        username, _, password = netrc.netrc().authenticators(urs)
    #-- build the urllib2 opener and verify the WebDAV credentials
    if build:
        build_opener(username, password)
        check_credentials()
    #-- attempt to fetch and parse the remote directory listing
    try:
        response = urllib2.urlopen(urllib2.Request(posixpath.join(*HOST)),
            timeout=timeout)
        tree = lxml.etree.parse(response, parser)
    except (urllib2.HTTPError, urllib2.URLError):
        raise Exception('List error from {0}'.format(posixpath.join(*HOST)))
    #-- column names and Unix timestamps of the modification times
    colnames = tree.xpath('//tr/td//a[@class="text-left"]/text()')
    collastmod = [get_unix_time(ts) for ts in tree.xpath('//tr/td[3]/text()')]
    #-- reduce the listing with the regular expression pattern
    if pattern:
        keep = [j for j, name in enumerate(colnames)
            if re.search(pattern, name)]
        colnames = [colnames[j] for j in keep]
        collastmod = [collastmod[j] for j in keep]
    #-- sort both lists by column name
    if sort:
        order = [j for j, _ in sorted(enumerate(colnames),
            key=lambda item: item[1])]
        colnames = [colnames[j] for j in order]
        collastmod = [collastmod[j] for j in order]
    #-- return the list of column names and last modified times
    return (colnames, collastmod)
def _SmartSyncSetup(self, opt, smart_sync_manifest_path):
    """Fetch an approved manifest from the manifest server and load it.

    Resolves credentials (command-line options first, then the user's
    .netrc), embeds them in the manifest server URL, asks the XML-RPC
    server for a manifest, writes it to smart_sync_manifest_path, and
    reloads the local manifest from it.  Exits the process on any error.

    Args:
        opt: parsed command-line options (quiet, manifest_server_username,
            manifest_server_password, smart_sync, smart_tag).
        smart_sync_manifest_path: filesystem path where the fetched
            manifest XML is written.

    Returns:
        The basename of the written manifest file.
    """
    # smart sync is impossible without a manifest server configured
    if not self.manifest.manifest_server:
        print('error: cannot smart sync: no manifest server defined in '
              'manifest', file=sys.stderr)
        sys.exit(1)

    manifest_server = self.manifest.manifest_server
    if not opt.quiet:
        print('Using manifest server %s' % manifest_server)

    # only look up credentials if the URL does not already embed them
    if '@' not in manifest_server:
        username = None
        password = None
        # explicit command-line credentials take precedence over .netrc
        if opt.manifest_server_username and opt.manifest_server_password:
            username = opt.manifest_server_username
            password = opt.manifest_server_password
        else:
            try:
                info = netrc.netrc()
            except IOError:
                # .netrc file does not exist or could not be opened
                pass
            else:
                try:
                    parse_result = urllib.parse.urlparse(manifest_server)
                    if parse_result.hostname:
                        auth = info.authenticators(parse_result.hostname)
                        if auth:
                            username, _account, password = auth
                        else:
                            print('No credentials found for %s in .netrc'
                                  % parse_result.hostname, file=sys.stderr)
                except netrc.NetrcParseError as e:
                    print('Error parsing .netrc file: %s' % e,
                          file=sys.stderr)

        # splice "user:pass@" into the URL after the scheme separator.
        # NOTE(review): the credentials are not URL-quoted here -- a
        # password containing '@', ':' or '/' would corrupt the URL;
        # TODO confirm whether urllib.parse.quote should be applied
        if (username and password):
            manifest_server = manifest_server.replace(
                '://', '://%s:%s@' % (username, password), 1)

    # the transport sees the full (possibly "persistent-" prefixed) URL;
    # the prefix is stripped only for the XML-RPC Server endpoint below
    transport = PersistentTransport(manifest_server)
    if manifest_server.startswith('persistent-'):
        manifest_server = manifest_server[len('persistent-'):]

    try:
        server = xmlrpc.client.Server(manifest_server, transport=transport)
        if opt.smart_sync:
            # derive the branch name from the manifest project's merge
            # branch, stripping any refs/heads/ prefix
            p = self.manifest.manifestProject
            b = p.GetBranch(p.CurrentBranch)
            branch = b.merge
            if branch.startswith(R_HEADS):
                branch = branch[len(R_HEADS):]

            # the build target is taken from the environment: SYNC_TARGET
            # wins, else TARGET_PRODUCT-TARGET_BUILD_VARIANT, else none
            if 'SYNC_TARGET' in os.environ:
                target = os.environ['SYNC_TARGET']
                [success, manifest_str] = server.GetApprovedManifest(
                    branch, target)
            elif ('TARGET_PRODUCT' in os.environ and
                  'TARGET_BUILD_VARIANT' in os.environ):
                target = '%s-%s' % (os.environ['TARGET_PRODUCT'],
                                    os.environ['TARGET_BUILD_VARIANT'])
                [success, manifest_str] = server.GetApprovedManifest(
                    branch, target)
            else:
                [success, manifest_str] = server.GetApprovedManifest(branch)
        else:
            # caller guarantees exactly one of smart_sync / smart_tag
            assert(opt.smart_tag)
            [success, manifest_str] = server.GetManifest(opt.smart_tag)

        if success:
            manifest_name = os.path.basename(smart_sync_manifest_path)
            try:
                with open(smart_sync_manifest_path, 'w') as f:
                    f.write(manifest_str)
            except IOError as e:
                print('error: cannot write manifest to %s:\n%s'
                      % (smart_sync_manifest_path, e), file=sys.stderr)
                sys.exit(1)
            self._ReloadManifest(manifest_name)
        else:
            print('error: manifest server RPC call failed: %s'
                  % manifest_str, file=sys.stderr)
            sys.exit(1)
    except (socket.error, IOError, xmlrpc.client.Fault) as e:
        print('error: cannot connect to manifest server %s:\n%s'
              % (self.manifest.manifest_server, e), file=sys.stderr)
        sys.exit(1)
    except xmlrpc.client.ProtocolError as e:
        print('error: cannot connect to manifest server %s:\n%d %s'
              % (self.manifest.manifest_server, e.errcode, e.errmsg),
              file=sys.stderr)
        sys.exit(1)

    return manifest_name
def setUp(self):
    """Write the sample netrc contents to a temp file and parse it.

    Leaves the parsed ``netrc.netrc`` instance on ``self.netrc`` for the
    test methods to inspect.
    """
    # use a context manager so the file handle is closed even if the
    # write raises (the previous open/write/close leaked it on error)
    with open(temp_filename, 'wt') as fp:
        fp.write(TEST_NETRC)
    self.netrc = netrc.netrc(temp_filename)