def curl_binary(self):
    """Locate a curl executable, trying sources in this priority order:

    1. env['CURL_PATH']
    2. the 'CURL_PATH' app preference
    3. any 'curl' binary found via the PATH environment variable
    4. /usr/bin/curl

    Raises:
        ProcessorError: when no executable curl binary can be found.
    """
    # Highest priority: an explicit path in the processor environment.
    if "CURL_PATH" in self.env and is_executable(self.env["CURL_PATH"]):
        return self.env["CURL_PATH"]

    # Next: the app preference, warning when it points at a bad path.
    pref_path = get_pref("CURL_PATH")
    if pref_path and is_executable(pref_path):
        return pref_path
    if pref_path:
        log_err(
            "WARNING: curl path given in the 'CURL_PATH' preference:'{}' "
            "either doesn't exist or is not executable! Falling back "
            "to one set in PATH, or /usr/bin/curl.".format(
                pref_path))

    # Next: walk PATH looking for a usable 'curl'.
    for directory in os.environ["PATH"].split(":"):
        candidate = os.path.join(directory, "curl")
        if is_executable(candidate):
            return candidate

    # Last resort: the stock macOS location.
    if is_executable("/usr/bin/curl"):
        return "/usr/bin/curl"

    raise ProcessorError("Unable to locate or execute any curl binary")
def get_or_setup_token(self):
    """Setup a GitHub OAuth token string. Will help to create one if
    necessary. The string will be stored in TOKEN_LOCATION and used
    again if it exists."""
    token = self._get_token()

    # Only prompt when we have no token at all and no token file on disk.
    must_prompt = not token and not os.path.exists(TOKEN_LOCATION)
    if must_prompt:
        print("""Create a new token in your GitHub settings page:

    https://github.com/settings/tokens

To save the token, paste it to the following prompt.""")

        token = input("Token: ")
        if not token:
            log("Skipping token file creation.")
        else:
            log(f"Writing token file {TOKEN_LOCATION}.")
            try:
                with open(TOKEN_LOCATION, "w") as token_file:
                    token_file.write(token)
                # Owner-only permissions: the token is a credential.
                os.chmod(TOKEN_LOCATION, 0o600)
            except OSError as err:
                log_err(
                    f"Couldn't write token file at {TOKEN_LOCATION}! Error: {err}"
                )

    self.token = token
    return token
def code_search(self, query: str, use_token: bool = False):
    """Search GitHub code repos"""
    if use_token:
        _ = self.get_or_setup_token()

    # Do the search, including text match metadata
    results, status = self.call_api(
        "/search/code",
        query=query,
        accept="application/vnd.github.v3.text-match+json",
    )

    # A 403 almost always means the search rate limit was exceeded.
    if status == 403:
        log_err(
            "You've probably hit the GitHub's search rate limit, officially 5 "
            "requests per minute.\n")
        if results:
            log_err("Server response follows:\n")
            log_err(results.get("message", None))
            log_err(results.get("documentation_url", None))
        return None

    # Anything missing from the API call indicates a lower-level failure.
    if results is None or status is None:
        log_err("A GitHub API error occurred!")
        return None

    return results
def __init__(self, curl_path=None, curl_opts=None):
    """Create a session, optionally overriding the curl binary and its
    options, and resolve a GitHub token from the 'GITHUB_TOKEN'
    preference or, failing that, from the file at TOKEN_LOCATION."""
    super(GitHubSession, self).__init__()

    # Minimal processor-style environment for the curl machinery.
    self.env = {"url": None}
    if curl_path:
        self.env["CURL_PATH"] = curl_path
    if curl_opts:
        self.env["curl_opts"] = curl_opts

    self.http_result_code = None

    # Token resolution: preference first, token file second, else None.
    self.token = None
    pref_token = get_pref("GITHUB_TOKEN")
    if pref_token:
        self.token = pref_token
    elif os.path.exists(TOKEN_LOCATION):
        try:
            with open(TOKEN_LOCATION, "r") as token_file:
                self.token = token_file.read()
        except IOError as err:
            log_err(
                "Couldn't read token file at {}! Error: {}".format(
                    TOKEN_LOCATION, err
                )
            )
            self.token = None
def __init__(self):
    """Resolve a GitHub token from the 'GITHUB_TOKEN' preference or,
    failing that, from the file at TOKEN_LOCATION; otherwise None."""
    self.token = None

    pref_token = get_pref("GITHUB_TOKEN")
    if pref_token:
        self.token = pref_token
        return

    if not os.path.exists(TOKEN_LOCATION):
        return

    try:
        with open(TOKEN_LOCATION, "r") as token_file:
            self.token = token_file.read()
    except IOError as err:
        log_err("Couldn't read token file at %s! Error: %s"
                % (TOKEN_LOCATION, err))
        self.token = None
def _get_token(self, token_path: str = TOKEN_LOCATION) -> Optional[str]:
    """Read a token from preferences or from the provided token path.

    Defaults to TOKEN_LOCATION for the token path. Returns None when
    neither source yields a token.
    """
    token = get_pref("GITHUB_TOKEN")
    # A preference token wins; without a token file there is nothing
    # further to try.
    if token or not os.path.exists(token_path):
        return token

    try:
        with open(token_path, "r") as token_file:
            token = token_file.read().strip()
    except OSError as err:
        log_err(
            f"Couldn't read token file at {token_path}! Error: {err}")
        token = None

    # TODO: validate token given we found one but haven't checked its
    # auth status
    return token
def setup_token(self):
    """Setup a GitHub OAuth token string. Will help to create one if
    necessary. The string will be stored in TOKEN_LOCATION and used again
    if it exists."""
    # Pre-initialize so self.token is always assigned, even when the
    # token-file read below fails (previously 'token' could be unbound,
    # raising NameError at the final assignment).
    token = None
    if not os.path.exists(TOKEN_LOCATION):
        print(
            """Create a new token in your GitHub settings page:

    https://github.com/settings/tokens

To save the token, paste it to the following prompt."""
        )
        # Bug fix: raw_input() is Python 2 only and raises NameError on
        # Python 3; input() is the equivalent builtin.
        token = input("Token: ")
        if token:
            log("""Writing token file {}.""".format(TOKEN_LOCATION))
            try:
                with open(TOKEN_LOCATION, "w") as tokenf:
                    tokenf.write(token)
                # Owner-only permissions: the token is a credential.
                os.chmod(TOKEN_LOCATION, 0o600)
            except IOError as err:
                log_err(
                    "Couldn't write token file at {}! Error: {}".format(
                        TOKEN_LOCATION, err
                    )
                )
        else:
            log("Skipping token file creation.")
    else:
        try:
            with open(TOKEN_LOCATION, "r") as tokenf:
                token = tokenf.read()
        except IOError as err:
            log_err(
                "Couldn't read token file at {}! Error: {}".format(
                    TOKEN_LOCATION, err
                )
            )
        # TODO: validate token given we found one but haven't checked its
        # auth status
    self.token = token
def download_with_curl(self, curl_cmd):
    """Download file using curl and return raw headers.

    On a non-zero curl exit the failure is logged (not raised); for HTTP
    4xx failures (curl exit code 22) the HTTP status code is recovered
    from stderr into self.http_result_code.
    """
    p_stdout, p_stderr, retcode = self.execute_curl(curl_cmd)
    if retcode:
        # Non-zero exit code from curl => problem with download
        curl_err = self.parse_curl_error(p_stderr)
        log_err(
            f"Curl failure: Could not retrieve URL {self.env['url']}: {curl_err}"
        )
        if retcode == 22:
            # 22 means any 400 series return code. Note: header seems not to
            # be dumped to STDOUT for immediate failures. Hence
            # http_result_code is likely blank/000. Read it from stderr.
            if re.search(r"URL returned error: [0-9]+", p_stderr):
                m = re.match(r".* (?P<status_code>\d+) .*", p_stderr)
                # Bug fix: re.match can return None even when re.search
                # matched (the patterns differ), and None.group() would
                # raise AttributeError — guard on m first.
                if m and m.group("status_code"):
                    self.http_result_code = m.group("status_code")
    return p_stdout
def search_recipes(argv: List[str]):
    """Search recipes on GitHub"""
    verb = argv[1]
    parser = gen_common_parser()
    usage_text = (
        f"Usage: %prog {verb} [options] search_term\n"
        "Search for recipes on GitHub. The AutoPkg organization "
        "at github.com/autopkg\n"
        "is the canonical 'repository' of recipe repos, "
        "which is what is searched by\n"
        "default."
    )
    parser.set_usage(usage_text)
    parser.add_option(
        "-u",
        "--user",
        default=DEFAULT_SEARCH_USER,
        help=("Alternate GitHub user whose repos to search. "
              f"Defaults to '{DEFAULT_SEARCH_USER}'."),
    )
    parser.add_option(
        "-p",
        "--path-only",
        action="store_true",
        default=False,
        help=("Restrict search results to the recipe's path "
              "only. Note that the search API currently does not "
              "support fuzzy matches, so only exact directory or "
              "filenames (minus the extensions) will be "
              "returned."),
    )
    parser.add_option(
        "-t",
        "--use-token",
        action="store_true",
        default=False,
        help=("Use a public-scope GitHub token for a higher "
              "rate limit. If a token doesn't exist, you'll "
              "be prompted for your account credentials to "
              "create one."),
    )

    # Parse arguments
    options, arguments = common_parse(parser, argv)
    if not arguments:
        log_err("No search query specified!")
        return 1

    results_limit = 100
    results = GitHubSession().search_for_name(
        quote(arguments[0]),
        options.path_only,
        options.user,
        options.use_token,
        results_limit,
    )
    if not results:
        return 2

    print_gh_search_results(results)
    print()
    print("To add a new recipe repo, use 'autopkg repo-add <repo name>'")

    if len(results) > results_limit:
        print()
        print("Warning: Search yielded more than 100 results. Please try a "
              "more specific search term.")
        return 3
    return 0
def call_api(
    self,
    endpoint,
    method="GET",
    query=None,
    data=None,
    headers=None,
    accept="application/vnd.github.v3+json",
):
    """Return a tuple of a serialized JSON response and HTTP status code
    from a call to a GitHub API endpoint. Certain APIs return no JSON
    result and so the first item in the tuple (the response) will
    be None.
    endpoint: REST endpoint, beginning with a forward-slash
    method: optional alternate HTTP method to use other than GET
    query: optional additional query to include with URI (passed
           directly)
    data: optional dict that will be sent as JSON with request
    headers: optional dict of additional headers to send with request
    accept: optional Accept media type for exceptional APIs (like
            release assets)."""
    # Compose the URL
    url = BASE_URL + endpoint
    if query:
        url += "?" + query
    try:
        # Compose the curl command
        curl_path = curl_cmd()
        if not curl_path:
            # No curl available at all: signal with a (None, None) tuple.
            return (None, None)
        cmd = [
            curl_path,
            "--location",
            "--silent",
            "--show-error",
            "--fail",
            "--dump-header",
            "-",
        ]
        cmd.extend(["-X", method])
        cmd.extend(["--header", "%s: %s" % ("User-Agent", "AutoPkg")])
        cmd.extend(["--header", "%s: %s" % ("Accept", accept)])
        # Pass the GitHub token as a header
        if self.token:
            cmd.extend([
                "--header",
                "%s: %s" % ("Authorization", "token %s" % self.token)
            ])
        # Additional headers if defined
        if headers:
            for header, value in headers.items():
                cmd.extend(["--header", "%s: %s" % (header, value)])
        # Set the data header if defined
        if data:
            data = json.dumps(data)
            cmd.extend(
                ["-d", data, "--header", "Content-Type: application/json"])
        # Final argument to curl is the URL
        cmd.append(url)
        # Start the curl process
        # NOTE(review): no text/universal_newlines mode is set, so on
        # Python 3 readline() below would yield bytes while the parsing
        # code treats it as str — presumably this relied on a text-mode
        # pipe; confirm against the running interpreter.
        proc = subprocess.Popen(
            cmd,
            shell=False,
            bufsize=1,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
        )
        # Header state seeded with curl's "no response" placeholder code.
        header = {}
        header["http_result_code"] = "000"
        header["http_result_description"] = ""
        donewithheaders = False
        maxheaders = 15
        page_content = ""
        # Parse the headers and the JSON from curl output
        while True:
            info = proc.stdout.readline()
            if not donewithheaders:
                info = info.strip("\r\n")
                if info:
                    if info.startswith("HTTP/"):
                        # Status line: "HTTP/x.y CODE DESCRIPTION"
                        part = info.split(None, 2)
                        header["http_result_code"] = part[1]
                        try:
                            header["http_result_description"] = part[2]
                        except IndexError:
                            pass
                    elif "Status: 301" in info:
                        # Skip this block of headers when redirect is found
                        while True:
                            if proc.stdout.readline().strip("\r\n"):
                                continue
                            break
                    elif ": " in info:
                        # Ordinary "Name: value" header line.
                        part = info.split(None, 1)
                        fieldname = part[0].rstrip(":").lower()
                        try:
                            header[fieldname] = part[1]
                        except IndexError:
                            pass
                else:
                    # Blank line terminates the header section.
                    donewithheaders = True
            else:
                page_content += info
            if proc.poll() is not None:
                # For small download files curl may exit before all headers
                # have been parsed, don't immediately exit.
                maxheaders -= 1
                if donewithheaders or maxheaders <= 0:
                    break
        # All curl output should now be parsed
        retcode = proc.poll()
        if retcode:
            curlerr = ""
            try:
                # Typical stderr: "curl: (NN) message" — keep the message.
                curlerr = proc.stderr.read().rstrip("\n")
                curlerr = curlerr.split(None, 2)[2]
            except IndexError:
                pass
            if retcode == 22:
                # 22 means any 400 series return code. Note: header seems not to
                # be dumped to STDOUT for immediate failures. Hence
                # http_result_code is likely blank/000. Read it from stderr.
                # NOTE(review): re.match can return None here even though
                # re.search matched, which would make m.group raise
                # AttributeError — verify with a real curl 4xx stderr.
                if re.search(r"URL returned error: [0-9]+", curlerr):
                    m = re.match(r".* (?P<status_code>\d+) .*", curlerr)
                    if m.group("status_code"):
                        header["http_result_code"] = m.group("status_code")
            log_err("Could not retrieve URL %s: %s" % (url, curlerr))
        if page_content:
            resp_data = json.loads(page_content)
        else:
            resp_data = None
    except OSError:
        # NOTE(review): if Popen itself raised OSError, 'header' was never
        # assigned and the int(header.get(...)) below would raise
        # NameError — confirm whether this path is reachable in practice.
        log_err("Could not retrieve URL: %s" % url)
        resp_data = None
    http_result_code = int(header.get("http_result_code"))
    return (resp_data, http_result_code)
        # NOTE(review): this fragment is the tail of a dmg-unmount method
        # (its `def` line is outside this view). It detaches the mount
        # point recorded in self.mounts[pathname] via hdiutil.
        try:
            proc = subprocess.Popen(
                ("/usr/bin/hdiutil", "detach", self.mounts[pathname]),
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
            )
            # Only stderr is of interest; stdout is discarded.
            stderr = proc.communicate()[1]
        except OSError as err:
            # hdiutil could not be launched at all.
            raise ProcessorError(
                "hdiutil execution failed with error code %d: %s"
                % (err.errno, err.strerror))
        if proc.returncode != 0:
            # hdiutil ran but reported a detach failure.
            raise ProcessorError("unmounting %s failed: %s" % (pathname, stderr))

        # Delete mount from mount list.
        del self.mounts[pathname]


if __name__ == "__main__":
    # Ad-hoc manual test: mount a known local dmg, report the mount
    # point, then unmount it again. Exit 10 on ProcessorError, 0 on
    # success.
    try:
        DMGMOUNTER = DmgMounter()
        MOUNTPOINT = DMGMOUNTER.mount("Download/Firefox-sv-SE.dmg")
        log("Mounted at %s" % MOUNTPOINT)
        DMGMOUNTER.unmount("Download/Firefox-sv-SE.dmg")
    except ProcessorError as err:
        log_err("ProcessorError: %s" % err)
        sys.exit(10)
    else:
        sys.exit(0)
        # NOTE(review): this fragment is the tail of a dmg-unmount method
        # (its `def` line is outside this view); it is a modernized
        # (f-string, text-mode) duplicate of the same logic elsewhere in
        # this file. It detaches the mount recorded in
        # self.mounts[pathname] via hdiutil.
        try:
            proc = subprocess.Popen(
                ("/usr/bin/hdiutil", "detach", self.mounts[pathname]),
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
                # text=True: communicate() returns str, not bytes.
                text=True,
            )
            # Only stderr is of interest; stdout is discarded.
            (_, stderr) = proc.communicate()
        except OSError as err:
            # hdiutil could not be launched at all.
            raise ProcessorError(
                f"hdiutil execution failed with error code {err.errno}: {err.strerror}"
            )
        if proc.returncode != 0:
            # hdiutil ran but reported a detach failure.
            raise ProcessorError(f"unmounting {pathname} failed: {stderr}")

        # Delete mount from mount list.
        del self.mounts[pathname]


if __name__ == "__main__":
    # Ad-hoc manual test: mount a known local dmg, report the mount
    # point, then unmount it again. Exit 10 on ProcessorError, 0 on
    # success.
    try:
        DMGMOUNTER = DmgMounter()
        MOUNTPOINT = DMGMOUNTER.mount("Download/Firefox-sv-SE.dmg")
        log(f"Mounted at {MOUNTPOINT}")
        DMGMOUNTER.unmount("Download/Firefox-sv-SE.dmg")
    except ProcessorError as err:
        log_err(f"ProcessorError: {err}")
        sys.exit(10)
    else:
        sys.exit(0)