def get_sso_cookiejar(loginUrl, cookieFile=None):
    """
    Helper method to login and get an SSO cookie (if needed).

    If SSO login with the cookies from ``cookieFile`` works, no new
    authentication is done. If a new authentication is needed, the
    resulting cookie jar is stored in ``cookieFile`` (when one is given).

    :param loginUrl: URL to use for login (i.e. any protected url)
    :param cookieFile: path of a file with a pickled requests cookie jar
    :returns: the resulting cookie jar (with valid SSO cookie)
    """
    if cookieFile:
        try:
            with open(cookieFile, "rb") as f:
                cookieJar = pickle.load(f)
            with requests.Session() as s:
                s.cookies = cookieJar
                r1 = s.get(loginUrl, timeout=DEFAULT_TIMEOUT_SECONDS)
                # If we were not redirected to an SSO page, the cached
                # cookie is still accepted by the server.
                if r1.url == loginUrl:
                    logging.debug("SSO cookie from {0} is still valid".format(
                        cookieFile))
                    return cookieJar
        except IOError as ex:
            # Missing/unreadable cache file: fall through to a fresh sign-on.
            logger.exception(ex)
    # Fresh Kerberos sign-on.
    cookieJar = cern_sso.krb_sign_on(loginUrl)
    # BUGFIX: only persist the jar when a cache file was requested; the
    # original unconditionally called open(cookieFile, "wb") and raised
    # TypeError whenever cookieFile was left at its None default.
    if cookieFile:
        with open(cookieFile, "wb") as f:
            pickle.dump(cookieJar, f)
    return cookieJar
def krb_cookie(cls):
    """Retrieve the Kerberos SSO cookie for ``BASE_URL``.

    Note: a valid Kerberos ticket (``kinit``) must exist before this is
    called, since the sign-on relies on it.
    """
    jar = cern_sso.krb_sign_on(BASE_URL)
    return jar
def read_url(self,url): if not self.cookies: try: self.cookies = cern_sso.krb_sign_on(url) except requests.exceptions.HTTPError as ex: print "error in getting kerberos cookies\n ",ex,"\nmost likely you dont have a kerberos ticket active (or you are not allowed to view the webpage)\ntry doing a kinit\n" sys.exit() nr_tries = 0 max_tries = 3 while nr_tries < max_tries: try: data = self.session.get(url,cookies=self.cookies) nr_tries = max_tries except requests.exceptions.ConnectionError: nr_tries += 1 print "connection error, re-trying ",nr_tries return data.text
def generate(url):
    """Perform a Kerberos-based CERN SSO sign-on against ``url`` and
    return the resulting cookie jar."""
    return cern_sso.krb_sign_on(url)
def run(self, fgc_session, **kwargs):
    """Fetch a firmware binary (from EDMS, falling back to the local file
    repository), validate it, and stream it to the device through
    ``REGFGC3.PROG.*`` property sets.

    :param fgc_session: session object used for the ``set`` commands
    :param kwargs: must contain slot, board, device, variant, var_revision,
        api_revision, bin_crc, fw_file_repo, fw_file_loc and loose
    :raises FileNotFoundError: EDMS answered 200 but the body says "Not found"
    :raises RuntimeError: file is empty or exceeds FW_FILE_LIMIT_BYTES
    :raises AssertionError: header/adapter data mismatch (unless ``loose``)
    """
    # Chain to the parent state's behaviour first.
    super().run(fgc_session, **kwargs)
    # Unpack all transfer parameters from kwargs in one go.
    slot, board, device, variant, var_revision, api_revision, bin_crc, fw_file_repo, fw_file_loc, loose = (
        kwargs["slot"],
        kwargs["board"],
        kwargs["device"],
        kwargs["variant"],
        kwargs["var_revision"],
        kwargs["api_revision"],
        kwargs["bin_crc"],
        kwargs["fw_file_repo"],
        kwargs["fw_file_loc"],
        kwargs["loose"],)
    try:
        # Kerberos sign-on against EDMS, then download the binary.
        cookies = cern_sso.krb_sign_on("https://edms.cern.ch/ws/api/users/current")
        resp = requests.get(fw_file_loc, cookies=cookies, timeout=20)
        resp.raise_for_status()
    except (requests.HTTPError, requests.exceptions.RequestException) as re:
        # Any HTTP/network failure: fall back to the local file repository.
        self._logger.warning(f"Could not get bin file from EDMS {fw_file_loc}: {re}")
        self._logger.info("Trying to get file from file system...")
        response_data = self._get_file_from_fs(Path(fw_file_repo) / board / device / variant, fw_file_loc)
    except IndexError as ie:
        # NOTE(review): presumably cern_sso raises IndexError internally on
        # some sign-on failures — same filesystem fallback. TODO confirm.
        self._logger.warning(f"cern_sso library failed signing on EDMS: {ie}")
        self._logger.info("Trying to get file from file system...")
        response_data = self._get_file_from_fs(Path(fw_file_repo) / board / device / variant, fw_file_loc)
    else:
        # EDMS can answer 200 with a "Not found" body instead of a 404.
        if resp.status_code == 200 and resp.content.find(b"Not found") != -1:
            raise FileNotFoundError(f"Could not get bin file {fw_file_loc} from EDMS (file not found)")
        response_data = resp.content
    # Sanity-check the size before doing any work.
    fw_file_size = len(response_data)
    if not fw_file_size:
        raise RuntimeError(f"File's {fw_file_loc} is empty. Nothing to do...")
    if fw_file_size > FW_FILE_LIMIT_BYTES:
        raise RuntimeError(f"File's {fw_file_loc} size {fw_file_size} over limit {FW_FILE_LIMIT_BYTES}")
    # Check against header
    header_data, bin_crc = ff_utils.extract_data_from_file_header_trailer(response_data)
    header_info = PmStateTransferring.ProgramInfo(*header_data)
    adapter_info = PmStateTransferring.ProgramInfo(board, device, variant, var_revision, api_revision)
    try:
        ff_utils.check_header_data_consistency(adapter_info, header_info)
    except AssertionError as ae:
        # In "loose" mode a header/adapter mismatch is tolerated.
        if not loose:
            raise AssertionError(f"Adapter data != FW file header data: {ae}") from ae
    # Convert the binary to a list of 4-byte hex words.
    packet = list()
    for word in range(0, fw_file_size, 4):
        # Bin into hex and decode
        ascii_word = hexlify(response_data[word:word + 4]).decode()
        # Add padding
        ascii_word += "".join(["0"] * (CHARS_PER_WORD - len(ascii_word)))
        packet.append(hex(int(ascii_word, 16)))
    # Describe the transfer to the device before sending the data.
    _ = fgc_session.set("REGFGC3.PROG.SLOT" ,slot)
    _ = fgc_session.set("REGFGC3.PROG.DEVICE" ,device)
    _ = fgc_session.set("REGFGC3.PROG.VARIANT" ,variant)
    _ = fgc_session.set("REGFGC3.PROG.VARIANT_REVISION" ,var_revision)
    _ = fgc_session.set("REGFGC3.PROG.API_REVISION" ,api_revision)
    _ = fgc_session.set("REGFGC3.PROG.BIN_SIZE_BYTES" ,fw_file_size)
    _ = fgc_session.set("REGFGC3.PROG.BIN_CRC" ,bin_crc)
    # Send the binary in chunks that fit in one gateway command.
    for i in range(0, len(packet), LIMIT_GW_CMD_WORDS):
        fgc_session.set(f"REGFGC3.PROG.BIN[{i},]", ",".join(packet[i:i + LIMIT_GW_CMD_WORDS]))
    super().run(fgc_session, **kwargs)
# Command-line driver: look up McM prep IDs (and, further on, generator
# settings) for MiniAOD/NanoAOD datasets, optionally caching the CERN SSO
# cookies between runs.
parser = argparse.ArgumentParser(description="Find the McM prep IDs and generator settings for a MiniAOD or NanoAOD sample")
parser.add_argument("-v", "--verbose", action="store_true", help="Print verbose output")
parser.add_argument("dataset", nargs="+", help="Dataset names in DAS (MINI/NANO/)AODSIM")
parser.add_argument("--cernSSOcookies", type=str, help="Pickled cookie jar with CERN SSO cookies for https://cms-pdmv.cern.ch/mcm/ (to cache between subsequent runs - remove if expired)" )
args = parser.parse_args()
logging.basicConfig(level=(logging.DEBUG if args.verbose else logging.INFO))
# Reuse pickled SSO cookies when the cache file already exists...
prodcookies = None
if args.cernSSOcookies:
    if os.path.isfile(args.cernSSOcookies):
        with open(args.cernSSOcookies, "rb") as f:
            prodcookies = pickle.load(f)
if not prodcookies:
    # ...otherwise do a fresh Kerberos sign-on, and cache it if requested.
    import cern_sso
    prodcookies = cern_sso.krb_sign_on("https://cms-pdmv.cern.ch/mcm/", verify=False)
    if args.cernSSOcookies:
        with open(args.cernSSOcookies, "wb") as f:
            pickle.dump(prodcookies, f)
from pprint import pprint
from collections import defaultdict
# NOTE(review): this loop likely continues beyond the visible chunk.
for dataset in args.dataset:
    aodSim = getAODSIMName(dataset)
    aodsimPrepID = getMcMPrepID(aodSim)
    logger.debug(f"prep_id for {aodSim}: {aodsimPrepID}")
    # Suppress the InsecureRequestWarning caused by verify=False.
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        req_aod = requests.get(f"https://cms-pdmv.cern.ch/mcm/public/restapi/requests/get/{aodsimPrepID}", verify=False).json()["results"]
    chainId = req_aod["member_of_chain"][0]
    logger.debug(f"Chained prep ID: {chainId}")
import cern_sso
from zeep import Client
from zeep.transports import Transport
from dbconstants import WSDL
# from KerberosTicketGenerator import AttemptGenerateKerberosTicket

# Module-level setup: sign on to the WSDL endpoint with Kerberos and build
# a shared zeep SOAP client whose session carries the SSO cookies.
try:
    # AttemptGenerateKerberosTicket()
    cookies = cern_sso.krb_sign_on(WSDL)
    transport = Transport(cache=False)
    transport.session.cookies.update(cookies)
    client = Client(WSDL, transport=transport)
except Exception as e:
    # Re-raised unchanged: the module cannot work without the client.
    raise


class DBRequest:
    # Holds the request parameters as a dict; validates and (by default)
    # executes immediately on construction.
    def __init__(self, executeImmediately=True, **kwargs):
        self.RequestInfo = kwargs
        self.validate()
        if executeImmediately:
            # NOTE(review): ExecuteRequest is not visible in this chunk —
            # presumably defined further down in the class. TODO confirm.
            self.ExecuteRequest()

    def validate(self):
        """ Throw key and type errors as appropriate """
        pass
def main():
    """Synchronize GitLab tag release notes for acts/acts-core from JIRA
    issues and merged merge requests."""
    p = argparse.ArgumentParser()
    p.add_argument("--access-token", help="Gitlab access token to update the releases", default=os.getenv("ATSJENKINS_ACCESS_TOKEN", None))
    p.add_argument("--dry-run", "-s", action="store_true")
    args = p.parse_args()
    jira_url = "https://its.cern.ch/jira"
    # Kerberos SSO against JIRA; reuse the cookies for all API calls.
    cookies = cern_sso.krb_sign_on(jira_url)
    jira = JIRA(cookies=cookies, url=jira_url)
    gl = gitlab.Gitlab("https://gitlab.cern.ch", private_token=args.access_token)
    if not args.dry_run:
        # A token is only required when we actually write release notes.
        assert args.access_token is not None
        gl.auth()
    project = gl.projects.get("acts/acts-core")
    with spinner(text="Loading tags"):
        tags = project.tags.list(all=True)
    with spinner(text="Loading merge requests"):
        mrlist = project.mergerequests.list(state="merged", target_branch="master", all=True)
    with ThreadPoolExecutor(max_workers=15) as tp:
        mrs = mrlist
        # Sort tags chronologically by their commit creation date.
        for tag in tags:
            date = dateutil.parser.parse(tag.commit["created_at"])
            tag.created_at = date
        tags = list(sorted(tags, key=lambda t: t.created_at))

        def augment_with_commit(mr):
            # Attach the merge commit's date so MRs can be bucketed
            # between consecutive tags below.
            commit = project.commits.get(mr.sha)
            date = dateutil.parser.parse(commit.created_at)
            mr.commit_date = date
            return mr

        mrs = mtmap(tp, augment_with_commit, mrs, desc="Loading MR commit info")

        def load_issues(tag):
            version = parse_version(tag)
            try:
                return tag, jira.get_version_issues(version)
            except JIRAException:
                # Version unknown to JIRA: treat as having no issues.
                return tag, []

        version_issues = dict(mtmap(tp, load_issues, tags, desc="Loading issues from JIRA"))
    # Bucket MRs by tag: everything before the first tag belongs to it...
    tag_mrs = {}
    tag_mrs[tags[0]] = []
    for mr in mrs:
        if tags[0].created_at > mr.commit_date:
            tag_mrs[tags[0]].append(mr)
    # ...and each later MR belongs to the first tag created after it.
    for tag, tagn in zip(tags, tags[1:]):
        tag_mrs[tagn] = []
        for mr in mrs:
            if tag.created_at < mr.commit_date < tagn.created_at:
                tag_mrs[tagn].append(mr)
    print("Found", len(tags), "tags")
    for tag in prog_iter(tags, desc="Updating tag release notes"):
        name = tag.name
        version = parse_version(tag)
        has_release = tag.release is not None
        prog_write(name)
        relnotes = make_release_notes(version, version_issues[tag], tag_mrs[tag])
        if not has_release:
            prog_write("Creating release for tag %s"%name)
        else:
            prog_write("Updating release notes for tag %s"%name)
        if not args.dry_run:
            # Only mutate GitLab outside dry-run mode.
            tag.set_release_description(relnotes)
        prog_write("Release notes for %s set" % name)
    print("Release note synchronization complete")
def test_krb_signin():
    # Obtain SSO cookies via Kerberos, then verify they grant access.
    sso_cookies = cern_sso.krb_sign_on(TEST_URL)
    response = requests.get(TEST_URL, cookies=sso_cookies)
    assert response.status_code == 200
#!/usr/bin/env python import requests, re import cern_sso s = requests.Session() domain = 'https://indico.cern.ch/' print "Getting cookies" s.cookies = cern_sso.krb_sign_on(domain + 'category/6142/') name_re = re.compile(r'.*/(.*)$') with open('slides.txt') as f: for url in f: url = url.strip() url = domain + url r = s.get(url, stream=True) r.raise_for_status() name = 'slides/' + name_re.sub(r'\1', url) print name with open(name, 'wb') as f: for block in r.iter_content(1024): f.write(block)
# DEBUG takes presedence over VERBOSE: if args.debug: logger.setLevel(logging.DEBUG) elif args.verbose: logger.setLevel(logging.INFO) cookie_filename = args.cookie_filename target_url = args.url cookiejar = MozillaCookieJar(cookie_filename) if args.kerberos: cern_sso.krb_sign_on(target_url, cookiejar=cookiejar) elif args.cert: cert_file = "%s.pem" % args.cert key_file = "%s.key" % args.cert logger.info("Using SSL certificate file %s and key %s" % (cert_file, key_file)) cern_sso.cert_sign_on(target_url, cert_file=cert_file, key_file=key_file, cookiejar=cookiejar) else: assert False, "Either kerberos or cert should ALWAYS be true!" if not cookiejar: