def update_acl_genomic_file(self, gf, biospecimen_id):
    """
    Update ACLs of all genomic files associated with a biospecimen.

    Files that are not visible have their ACLs cleared; visible files
    receive the ACLs supplied in ``gf``. Each PATCH is retried once more
    on an HTTP 500 response.

    :param gf: dict with an ``"acl"`` key holding the desired ACL list
    :param biospecimen_id: kf_id of the biospecimen whose files to update
    :raises TimeoutException: if a file update does not return HTTP 200
    """
    # Get the links of genomic files for that biospecimen
    response = self.get_gfs_from_biospecimen(biospecimen_id)
    for r in response['results']:
        # Hidden files get an empty ACL; visible ones get the requested ACL
        acl = gf if r['visible'] else {"acl": []}
        # Do not update if acl's are as expected
        if r['acl'] == acl['acl']:
            continue
        # Bug fix: reset the retry budget for every file. Previously a
        # single counter was shared across the loop, so one failing file
        # exhausted the retries for all subsequent files.
        retry_count = 3
        while retry_count > 1:
            resp = requests.patch(
                self.api + '/genomic-files/' + r['kf_id'],
                json=acl,
                # NOTE(review): get_remaining_time_in_millis() returns
                # milliseconds but requests' timeout is in seconds —
                # confirm this value is intentional.
                timeout=self.context.get_remaining_time_in_millis() - 8000)
            if resp.status_code != 500:
                break
            retry_count = retry_count - 1
        if resp.status_code != 200:
            raise TimeoutException
    return
def post_accept(id):
    """Grant an accepted member their Discord roles and set a guild-tagged nick."""
    member_url = ("https://discordapp.com/api/guilds/200746010102726657/members/"
                  + str(id))
    # Base "accepted" role
    requests.put(member_url + "/roles/530223486694719558",
                 data=None, headers=headers)
    # Role matching the player's guild
    requests.put(member_url + "/roles/" + guildrole(playerdata['guildid']),
                 data=None, headers=headers)
    # Prefix the nickname with the guild tag
    guildtag = getguildtag(playerdata['guildid'])
    requests.patch(member_url,
                   data=None,
                   json={'nick': '[' + guildtag + '] ' + player['albionname']},
                   headers=headers)
    return
def set_auth0_user_verified(userid):
    """
    Mark the given Auth0 user's email address as verified.

    :param userid: Auth0 user id; URL-quoted into the Management API path
    """
    headers = {
        'Authorization': 'Bearer %s'
                         % (get_auth0_management_token()['access_token'])
    }
    # NOTE(review): urllib.quote exists only on Python 2; Python 3 needs
    # urllib.parse.quote — confirm the target runtime.
    url = 'https://neo4j-sync.auth0.com/api/v2/users/%s' % (
        urllib.quote(userid))
    r2 = requests.patch(url, json={'email_verified': True}, headers=headers)
    # Lazy %-args defer formatting until the log level is known to be enabled
    logger.info('Updating verified for: %s as %s', userid, r2.status_code)
    # Typo fix in log message: "settng" -> "setting"
    logger.info('Received response from setting email verified')
    logger.info(r2.text)
def makePatchRequest(self, path, data, event):
    """PATCH *data* as JSON to the service API, authenticated with the
    user's access token taken from the Alexa request context."""
    access_token = event["context"]["System"]["user"]["accessToken"]
    request_headers = {
        "Content-Type": "application/json",
        "X-Access-Token": access_token,
        "X-Client-ID": "ee29112eeee47ea2179d",
    }
    req = requests.patch(Event.baseURL + path,
                         data=json.dumps(data),
                         headers=request_headers)
    print(req)
    return req
def lambda_handler(event, context):
    """Set the current game on a Twitch channel via the Helix API.

    The Twitch client id is read from AWS Secrets Manager; the user's
    OAuth access token and ids arrive in the Lambda event payload.
    Returns the upstream status code and response text.
    """
    secrets_client = boto3.session.Session().client(
        service_name='secretsmanager', region_name="us-east-1")
    secret_value = secrets_client.get_secret_value(SecretId="twitch.client_id")
    client_id = json.loads(secret_value["SecretString"])["twitch"]["client_id"]

    response = requests.patch(
        "https://api.twitch.tv/helix/channels?broadcaster_id={}".format(
            event["user_id"]),
        params={"game_id": event["game_id"]},
        headers={
            "Authorization": "Bearer {}".format(event["access_token"]),
            "Client-ID": client_id,
        })
    return {'status_code': response.status_code, 'text': response.text}
def lambda_handler(event, context):
    '''
    :summary: Function calls below mentioned rest API endpoints from
        RocketCX Enterprise Connector for ServiceNow.
        api/x_ecsd_amazon_conn/connect_cti_api/survey
        This API accepts caller phone number, survey points (1-5) and
        Amazon Connect call ID for which survey feedback needs to be
        recorded. For more details on API please refer to API
        documentation.
    :input: record number and survey points from call flow
    :output: {'survey_update_status': 1} on HTTP 200 (successful update
        of the SNOW call log table), otherwise {'survey_update_status': 0}
    '''
    url: str = os.environ['SERVICENOW_HOST']
    servicenow_user: str = os.environ['SERVICENOW_USER']
    servicenow_password: str = os.environ['SERVICENOW_PASSWORD']
    # NOTE(review): Connect contact-flow parameters are typically strings;
    # the int annotations below look aspirational — confirm upstream types.
    phone: int = event['Details']['Parameters']['Phone']
    survey_points: int = event['Details']['Parameters']['Survey_Points']
    call_id: str = event['Details']['Parameters']['Call_Id']
    # Fix: this was an f-string with no placeholders
    url_with_extension = url + 'api/x_ecsd_amazon_conn/connect_cti_api/survey'
    headers = {"Accept": "application/json"}
    survey_data = {
        "phone_no": phone,
        "point": survey_points,
        "call_id": call_id
    }
    response = requests.patch(url_with_extension,
                              data=json.dumps(survey_data),
                              auth=(servicenow_user, servicenow_password),
                              headers=headers)
    return {'survey_update_status': 1 if response.status_code == 200 else 0}
def update_dbgap_consent_code(self, biospecimen_id, consent_code,
                              consent_short_name):
    """
    PATCH a biospecimen's dbGaP consent code and consent type.

    Makes at most two attempts, retrying only on HTTP 500.

    :param biospecimen_id: kf_id of the biospecimen to update
    :param consent_code: dbGaP consent code to store
    :param consent_short_name: short name stored as consent_type
    :raises TimeoutException: when the final response is not HTTP 200
    :returns: True on success
    """
    payload = {
        "dbgap_consent_code": consent_code,
        "consent_type": consent_short_name,
    }
    for _ in range(2):  # initial attempt + one retry on server error
        resp = requests.patch(
            self.api + '/biospecimens/' + biospecimen_id,
            json=payload,
            timeout=self.context.get_remaining_time_in_millis() - 12000)
        if resp.status_code != 500:
            break
    if resp.status_code != 200:
        raise TimeoutException
    return True
def query_SW(query, method="get", payload=None):
    """
    Call the Stealthwatch Cloud observable API and return the parsed JSON.

    :param query: path appended to the API base URL
    :param method: "get" or "patch"; anything else returns None
    :param payload: JSON body sent with PATCH requests
    :returns: decoded JSON response on HTTP 200, otherwise None
    """
    # Validate the method up front, before building headers or the URL
    if method not in ("get", "patch"):
        return None
    header = {"Content-Type": "application/json",
              "Accept": "application/json",
              "Authorization": "ApiKey {}".format(API_KEY_SW)
              }
    url = "https://XYZ.obsrvbl.com/api/v3/" + query
    if method == "get":
        res = requests.get(url, headers=header)
    else:
        res = requests.patch(url, json=payload, headers=header)
    if res.status_code != 200:
        # Bug fix: the message labelled the value "text" but printed the
        # HTTP status code
        print("Failed to execute query, status: {}".format(res.status_code))
        return None
    # res.json() decodes the body directly (equivalent to json.loads(res.text))
    return res.json()
def lambda_handler(event, context):
    """
    Handle a GitHub "release published" webhook and open (or update) a
    METADATA.jl pull request registering/tagging the released Julia package.

    High-level flow (steps numbered in comments below): verify the webhook
    signature, read the release/repository payload, look up the package in
    METADATA, create git blobs/tree/commit on the bot's fork, push a branch,
    and open a PR (or comment on an existing one). Returns a short status
    string; errorissue() is used to report user-facing problems.

    NOTE(review): `if not PREV_VERSIONS` after `filter(...)` relies on
    Python 2 semantics (filter returning a list); under Python 3 a filter
    object is always truthy — confirm the target runtime is Python 2.
    """
    body_str = base64.b64decode(event["body64"])
    logger.info(body_str)
    # Reject payloads whose HMAC signature does not match our shared secret
    if not verify_signature(SECRET, event["signature"], body_str):
        raise Exception('[Unauthorized] Authentication error')

    # https://developer.github.com/v3/activity/events/types/#releaseevent
    body = json.loads(body_str)
    if body["action"] != "published":
        return 'Not a "published" event'

    release = body["release"]
    repository = body["repository"]

    AUTHOR = release["author"]["login"]
    TAG_NAME = release["tag_name"]
    HTML_URL = release["html_url"]
    REPO_NAME = repository["name"]
    REPO_FULLNAME = repository["full_name"]
    # All URL forms under which the repo may be recorded in METADATA
    REPO_URLS = [repository["clone_url"], repository["git_url"],
                 repository["ssh_url"]]
    REPO_HTML_URL = repository["html_url"]

    # Julia packages are conventionally named Foo.jl; strip the suffix
    if REPO_NAME.endswith(".jl"):
        PKG_NAME = REPO_NAME[:-3]
    else:
        errorissue(REPO_FULLNAME, AUTHOR,
                   "The repository does not have a .jl suffix.")

    # Tags must be SemVer with a leading "v", e.g. v1.2.3
    if not re.match(r"v\d+\.\d+\.\d+$", TAG_NAME):
        errorissue(REPO_FULLNAME, AUTHOR,
                   "The tag name \"" + TAG_NAME +
                   "\" is not of the appropriate SemVer form (vX.Y.Z).")
    VERSION = TAG_NAME[1:]

    # 1) check if package registered
    r = requests.get(urljoin(GITHUB_API, "repos", META_ORG, META_NAME,
                             "contents", PKG_NAME, "url"),
                     auth=(BOT_USER, BOT_PASS),
                     params={"ref": META_BRANCH})
    if r.status_code == 404:
        # Package not in METADATA yet: this release registers it
        REGISTER = True
    else:
        REGISTER = False
        rj = r.json()
        # verify this is indeed the package with the correct name
        REPO_URL_META = gh_decode(rj).rstrip()
        if REPO_URL_META not in REPO_URLS:
            errorissue(REPO_FULLNAME, AUTHOR,
                       "The URL of this package does not match that stored "
                       "in METADATA.jl.")

        # 1a) get last version
        r = requests.get(urljoin(GITHUB_API, "repos", META_ORG, META_NAME,
                                 "contents", PKG_NAME, "versions"),
                         auth=(BOT_USER, BOT_PASS),
                         params={"ref": META_BRANCH})
        rj = r.json()
        ALL_VERSIONS = [d["name"] for d in rj]
        # Only versions strictly below the new one are valid predecessors
        PREV_VERSIONS = filter(lambda v: semverkey(v) < semverkey(VERSION),
                               ALL_VERSIONS)
        if not PREV_VERSIONS:
            errorissue(REPO_FULLNAME, AUTHOR,
                       "Cannot tag a new version \"" + TAG_NAME +
                       "\" preceding all existing versions.")
        LAST_VERSION = max(PREV_VERSIONS, key=semverkey)

        # 1b) get last version sha1
        r = requests.get(urljoin(GITHUB_API, "repos", META_ORG, META_NAME,
                                 "contents", PKG_NAME, "versions",
                                 LAST_VERSION, "sha1"),
                         auth=(BOT_USER, BOT_PASS),
                         params={"ref": META_BRANCH})
        rj = r.json()
        LAST_SHA1 = gh_decode(rj).rstrip()

        # 1c) get last requires
        # this may not exist in some very old cases
        r = requests.get(urljoin(GITHUB_API, "repos", META_ORG, META_NAME,
                                 "contents", PKG_NAME, "versions",
                                 LAST_VERSION, "requires"),
                         auth=(BOT_USER, BOT_PASS),
                         params={"ref": META_BRANCH})
        if r.status_code == 200:
            rj = r.json()
            LAST_REQUIRE = gh_decode(rj)
        else:
            LAST_REQUIRE = ""

    # 2) get the commit hash corresponding to the tag
    r = requests.get(urljoin(GITHUB_API, "repos", REPO_FULLNAME,
                             "git/refs/tags", TAG_NAME),
                     auth=(BOT_USER, BOT_PASS))
    rj = r.json()

    # 2a) if annotated tag: need to make another request
    if rj["object"]["type"] == "tag":
        r = requests.get(rj["object"]["url"], auth=(BOT_USER, BOT_PASS))
        rj = r.json()
    SHA1 = rj["object"]["sha"]

    # 3) get the REQUIRE file from the commit
    r = requests.get(urljoin(GITHUB_API, "repos", REPO_FULLNAME,
                             "contents", "REQUIRE"),
                     auth=(BOT_USER, BOT_PASS),
                     params={"ref": SHA1})
    if r.status_code == 404:
        errorissue(REPO_FULLNAME, AUTHOR,
                   "The REQUIRE file could not be found.")
    rj = r.json()
    REQUIRE = gh_decode(rj).replace('\r\n', '\n')  # normalize line endings

    # 4) get current METADATA head commit
    r = requests.get(urljoin(GITHUB_API, "repos", META_ORG, META_NAME,
                             "git/refs/heads", META_BRANCH),
                     auth=(BOT_USER, BOT_PASS))
    rj = r.json()
    PREV_COMMIT_SHA = rj["object"]["sha"]
    PREV_COMMIT_URL = rj["object"]["url"]

    # 5) get tree corresponding to last METADATA commit
    r = requests.get(PREV_COMMIT_URL, auth=(BOT_USER, BOT_PASS))
    rj = r.json()
    PREV_TREE_SHA = rj["tree"]["sha"]

    # 6a) create blob for REQUIRE
    r = requests.post(urljoin(GITHUB_API, "repos", BOT_USER, META_NAME,
                              "git/blobs"),
                      auth=(BOT_USER, BOT_PASS),
                      json=gh_encode(REQUIRE))
    rj = r.json()
    REQUIRE_BLOB_SHA = rj["sha"]

    # 6b) create blob for SHA1
    r = requests.post(urljoin(GITHUB_API, "repos", BOT_USER, META_NAME,
                              "git/blobs"),
                      auth=(BOT_USER, BOT_PASS),
                      json=gh_encode(SHA1+"\n"))
    rj = r.json()
    SHA1_BLOB_SHA = rj["sha"]

    # 6c) create blob for url if necessary
    if REGISTER:
        r = requests.post(urljoin(GITHUB_API, "repos", BOT_USER, META_NAME,
                                  "git/blobs"),
                          auth=(BOT_USER, BOT_PASS),
                          json=gh_encode(REPO_URLS[0]+"\n"))
        rj = r.json()
        URL_BLOB_SHA = rj["sha"]

    # 7) create new tree
    tree_data = {
        "base_tree": PREV_TREE_SHA,
        "tree": [
            {
                "path": urljoin(PKG_NAME, "versions", VERSION, "requires"),
                "mode": "100644",
                "type": "blob",
                "sha": REQUIRE_BLOB_SHA
            },
            {
                "path": urljoin(PKG_NAME, "versions", VERSION, "sha1"),
                "mode": "100644",
                "type": "blob",
                "sha": SHA1_BLOB_SHA
            }
        ]
    }
    if REGISTER:
        # New packages also record their canonical clone URL
        tree_data["tree"].append({
            "path": urljoin(PKG_NAME, "url"),
            "mode": "100644",
            "type": "blob",
            "sha": URL_BLOB_SHA
        })
    r = requests.post(urljoin(GITHUB_API, "repos", BOT_USER, META_NAME,
                              "git/trees"),
                      auth=(BOT_USER, BOT_PASS),
                      json=tree_data)
    rj = r.json()
    NEW_TREE_SHA = rj["sha"]

    # 7.5) get user info for commit
    r = requests.get(urljoin(GITHUB_API, "users", AUTHOR),
                     auth=(BOT_USER, BOT_PASS))
    rj = r.json()
    AUTHOR_NAME = rj["name"]
    if AUTHOR_NAME is None:
        AUTHOR_NAME = AUTHOR
    AUTHOR_EMAIL = rj["email"]
    if AUTHOR_EMAIL is None:
        # get the email from the last commit by the author
        r = requests.get(urljoin(GITHUB_API, "repos", REPO_FULLNAME,
                                 "commits"),
                         auth=(BOT_USER, BOT_PASS),
                         params={"author": AUTHOR})
        rj = r.json()
        if rj:
            AUTHOR_EMAIL = rj[0]["commit"]["author"]["email"]
        else:
            # otherwise use fallback (may or may not link to the author)
            AUTHOR_EMAIL = AUTHOR + "@users.noreply.github.com"

    # 8) create commit
    if REGISTER:
        msg = "Register " + REPO_NAME + " " + TAG_NAME + " [" + HTML_URL + "]"
    else:
        msg = "Tag " + REPO_NAME + " " + TAG_NAME + " [" + HTML_URL + "]"
    r = requests.post(urljoin(GITHUB_API, "repos", BOT_USER, META_NAME,
                              "git/commits"),
                      auth=(BOT_USER, BOT_PASS),
                      json={
                          "message": msg,
                          "parents": [PREV_COMMIT_SHA],
                          "tree": NEW_TREE_SHA,
                          "author": {
                              "name": AUTHOR_NAME,
                              "email": AUTHOR_EMAIL
                          },
                          "committer": {
                              "name": "AttoBot",
                              "email": "*****@*****.**"
                          }
                      })
    rj = r.json()
    NEW_COMMIT_SHA = rj["sha"]

    # 9) Create new ref (i.e. branch)
    NEW_BRANCH_NAME = PKG_NAME + "/" + TAG_NAME
    r = requests.post(urljoin(GITHUB_API, "repos", BOT_USER, META_NAME,
                              "git/refs"),
                      auth=(BOT_USER, BOT_PASS),
                      json={
                          "ref": "refs/heads/" + NEW_BRANCH_NAME,
                          "sha": NEW_COMMIT_SHA
                      })
    if r.status_code == 422:
        # 422 here means the branch already exists from an earlier attempt
        EXISTING = True
        # 9a) PR already exists, update the ref instead
        r = requests.patch(urljoin(GITHUB_API, "repos", BOT_USER, META_NAME,
                                   "git/refs/heads", NEW_BRANCH_NAME),
                           auth=(BOT_USER, BOT_PASS),
                           json={
                               "sha": NEW_COMMIT_SHA,
                               "force": True
                           })
    else:
        EXISTING = False

    # 10) Get travis link
    # this sometimes misses, if the tag has not yet made it to travis
    TRAVIS_PR_LINE = ""
    r = requests.get(urljoin("https://api.travis-ci.org/", "repos",
                             REPO_FULLNAME, "branches", TAG_NAME))
    if r.status_code == requests.codes.ok:
        rj = r.json()
        build_id = str(rj["branch"]["id"])
        # Only link the badge if the travis build matches this tag's commit
        if SHA1 == rj["commit"]["sha"]:
            badge_url = urljoin("https://api.travis-ci.org/",
                                REPO_FULLNAME + ".svg?branch=" + TAG_NAME)
            build_url = urljoin("https://travis-ci.org/", REPO_FULLNAME,
                                "builds", build_id)
            TRAVIS_PR_LINE = ("Travis: [![Travis Build Status](" + badge_url +
                              ")](" + build_url + ")\n")

    # 11) Create pull request
    if REGISTER:
        title = "Register new package " + REPO_NAME + " " + TAG_NAME
        body = "Repository: [" + REPO_FULLNAME + "](" + REPO_HTML_URL + ")\n" + \
            "Release: [" + TAG_NAME + "](" + HTML_URL + ")\n" + \
            TRAVIS_PR_LINE + \
            "cc: @" + AUTHOR + "\n" + \
            "\n" + TAG_REQ + "\n" + \
            "\n@" + AUTHOR + " This PR will remain open for 24 hours for feedback (which is optional). If you get feedback, please let us know if you are making changes, and we'll merge once you're done."
    else:
        diff_url = urljoin(REPO_HTML_URL, "compare", LAST_SHA1 + "..." + SHA1)
        # Human-readable diff of the REQUIRE file vs the previous version
        req_diff = "".join(difflib.unified_diff(
            LAST_REQUIRE.splitlines(True),
            REQUIRE.splitlines(True),
            LAST_VERSION + "/requires",
            VERSION + "/requires"))
        if req_diff == "":
            req_status = "no changes"
        else:
            # Ensure closing ``` is on its own line
            if not req_diff.endswith("\n"):
                req_diff += "\n"
            req_status = "\n```diff\n" + req_diff + "```"
        title = "Tag " + REPO_NAME + " " + TAG_NAME
        body = "Repository: [" + REPO_FULLNAME + "](" + REPO_HTML_URL + ")\n" + \
            "Release: [" + TAG_NAME + "](" + HTML_URL + ")\n" + \
            TRAVIS_PR_LINE + \
            "Diff: [vs v" + LAST_VERSION + "](" + diff_url + ")\n" + \
            "`requires` vs v" + LAST_VERSION + ": " + req_status + "\n" + \
            "cc: @" + AUTHOR + "\n" + \
            "\n" + TAG_REQ

    if EXISTING:
        # Branch (and hence PR) already existed: comment on the open PR
        r = requests.get(urljoin(GITHUB_API, "repos", META_ORG, META_NAME,
                                 "pulls"),
                         params={
                             "head": BOT_USER + ":" + NEW_BRANCH_NAME,
                             "state": "all"
                         })
        rj = r.json()[0]  # assume it is the only return value
        r = requests.post(rj["comments_url"],
                          auth=(BOT_USER, BOT_PASS),
                          json={
                              "body": body,
                          })
        rj = r.json()
        return "Comment created: " + rj["url"]
    else:
        r = requests.post(urljoin(GITHUB_API, "repos", META_ORG, META_NAME,
                                  "pulls"),
                          auth=(BOT_USER, BOT_PASS),
                          json={
                              "title": title,
                              "body": body,
                              "head": BOT_USER + ":" + NEW_BRANCH_NAME,
                              "base": META_BRANCH
                          })
        rj = r.json()
        return "PR created: " + rj["url"]
def patch(self, url, data, headers):
    """Serialize *data* to JSON, PATCH it to *url* with *headers*, and
    return the checked response."""
    logger.debug("PATCH Requests:\nurl=%s\ndata=%s" % (url, data))
    response = requests.patch(url=url, data=json.dumps(data), headers=headers)
    logger.debug("Response: %s" % response.text)
    return __check_resp__(response)