def handler(event, context):
    """Delete Elasticsearch indices older than `period` days.

    Relies on module-level `hostname`, `credentials`, `region`, and
    `period`. Each index's age is inferred by fuzzy-parsing a date out of
    its name; indices without a recognizable date are skipped.
    """
    # Normalize the endpoint to a scheme-prefixed URL ending in '/'.
    endpoint = 'https://' + hostname if not hostname.startswith(
        ('http://', 'https://')) else hostname
    url = endpoint + '/' if not endpoint.endswith('/') else endpoint
    auth = AWSV4Sign(credentials, region, 'es')
    response = requests.get(url + '_cat/indices?format=json', auth=auth)
    payload = json.loads(response.text)
    indices = [element['index'] for element in payload]
    # Hoisted out of the loop: the cutoff date is the same for every index.
    old_date = datetime.now() - timedelta(days=period)
    old_indices = 0
    for index in indices:
        try:
            idx_date = parse(index, fuzzy=True)
        except (ValueError, OverflowError):
            # dateutil's parse raises ValueError (no date found) or
            # OverflowError (date out of range); the original bare
            # `except:` would also have hidden unrelated bugs.
            print('Skipping index: %s (its name does not contain a date)' % index)
            continue
        if idx_date <= old_date:
            print('Deleting index: %s' % index)
            requests.delete(url + index, auth=auth)
            old_indices += 1
    if old_indices:
        print('Total indices deleted: %d' % old_indices)
    else:
        print('There are no indices older than %s days' % period)
def handle_restart(event):
    """Reset the remote state via DELETE, then close the Lex dialog."""
    requests.delete(URL)
    closing_message = {'contentType': 'PlainText', 'content': "Done"}
    return {
        'sessionAttributes': event['sessionAttributes'],
        'dialogAction': {
            'type': 'Close',
            'fulfillmentState': 'Fulfilled',
            'message': closing_message,
        },
    }
def remove_labels(self, issue_num, labels):
    """Remove the given labels from a single issue.

    :param issue_num: number of the issue to strip labels from
    :param labels: labels to remove (normalized via self._format_labels)
    :return: True if every DELETE succeeded, False on the first failure
    """
    issue_labels_url = 'https://api.github.com/repos/{repo}/issues/{id}/labels/' \
        .format(repo=self.repo, id=issue_num)
    for label in self._format_labels(labels):
        response = requests.delete(issue_labels_url + label, auth=self.auth)
        # Guard clause: bail out on the first label GitHub refuses to remove.
        if response.status_code != 200:
            logging.error(
                'Could not remove the label to {}: {}. \nResponse: {}'.
                format(str(issue_num), str(label),
                       json.dumps(response.json())))
            return False
        logging.info('Successfully removed label to {}: {}.'.format(
            str(issue_num), str(label)))
    return True
def remove_non_compliant_users_from_organization(users_to_remove, **kwargs):
    """Remove non-compliant users from the org, reporting results to Slack.

    When the optional MAX_NUMBER_USERS_TO_REMOVE kwarg is supplied and the
    number of users to remove exceeds it, nothing is removed and a warning
    is posted to Slack instead (safety threshold).
    """
    limit = kwargs.get('MAX_NUMBER_USERS_TO_REMOVE', None)
    if limit is not None and len(users_to_remove) > limit:
        # Safety threshold hit: warn and do nothing.
        notify_slack_channel(
            ":rotating_light: Github - Safety threshold (*" + str(limit) +
            "*) of users to remove from the organization has been hit.\n No users will be removed :rotating_light:",
            '#FFA500')
        return
    for user, url in users_to_remove.items():
        try:
            resp = requests.delete("https://api.github.com/orgs/" +
                                   ORGANIZATION + "/memberships/" + user,
                                   headers=HEADERS)
            if resp.status_code == 204:
                notify_slack_channel(
                    ":heavy_check_mark: Github - Non-compliant user was removed :heavy_check_mark:",
                    '#46A346', user=user, url=url)
        except Exception as e:
            notify_slack_channel("Error: " + str(e), '#ff0000')
def delete_user_from_ssc(config, headers, email):
    """Delete a user from the SSC corp via its REST API.

    :param config: parsed config exposing SSC.API_url and SSC.corp_name
    :param headers: HTTP headers (auth) to send with the request
    :param email: user's email address, used as the user identifier
    :return: True if SSC answered with HTTP_DELETE_CODE, False otherwise
    """
    try:
        delete_user = requests.delete(config['SSC']['API_url'] + '/corps/' +
                                      config['SSC']['corp_name'] + '/users/' +
                                      email,
                                      headers=headers,
                                      timeout=45)
        logger.warning(delete_user.status_code)
        # The original fell through to a bare `return` (None) on a
        # non-matching status; return an explicit boolean so callers
        # always get True/False.
        return delete_user.status_code == HTTP_DELETE_CODE
    except Exception:
        # Best-effort: any failure (network, bad config keys) reports
        # an unsuccessful delete. Replaces the original bare `except:`,
        # which would also have swallowed SystemExit/KeyboardInterrupt.
        return False
def _delete_request(self, endpoint):
    """Send a DELETE request carrying the API key and a fresh signature."""
    sig = self._generate_signature()
    credentials = {
        'key': self._api_key,
        'nonce': sig[1],
        'signature': sig[0],
    }
    return requests.delete(endpoint, params=credentials)
def delete_list_item(event, listId, itemId):
    """Delete one item from an Alexa household list via the List API."""
    system = event['context']['System']
    target = (system['apiEndpoint'] + '/v2/householdlists/' + listId +
              '/items/' + itemId)
    auth_headers = {
        'Authorization': 'Bearer ' + system['apiAccessToken'],
        'Content-Type': 'application/json',
    }
    requests.delete(target, headers=auth_headers)
def makeDeleteRequest(self, path, params, event):
    """DELETE Event.baseURL + path, authenticated with the user's token."""
    access_token = event["context"]["System"]["user"]["accessToken"]
    request_headers = {
        "X-Access-Token": access_token,
        "X-Client-ID": "ee29112eeee47ea2179d",
    }
    req = requests.delete(Event.baseURL + path, params=params,
                          headers=request_headers)
    print(req)
    return req
def servicenow_request(url, request_type, params=None, request_data=None):
    """
    Helper function to make a REST API request to ServiceNow and return
    the response in json format.

    :param url: Full ServiceNow REST endpoint URL
    :param request_type: Type of HTTP request (GET, POST, DELETE)
    :param params: Request parameters
    :param request_data: Request body (JSON-serialized for POST)
    :return: Response data from ServiceNow in json format (None for 204)
    :raises Exception: if ServiceNow returns a non-success status code
    """
    logger.debug(f"Request Type: {request_type}")
    logger.debug(f"Request URL: {url}")
    # Set proper headers
    headers = {
        "Content-Type": "application/json",
        "Accept": "application/json"
    }
    # Do the HTTP request; anything that is not POST/DELETE falls back to GET.
    if request_type == "POST":
        response = requests.post(url, params=params, auth=servicenow_auth,
                                 headers=headers,
                                 data=json.dumps(request_data))
    elif request_type == "DELETE":
        response = requests.delete(url, params=params, auth=servicenow_auth,
                                   headers=headers)
    else:
        response = requests.get(url, auth=servicenow_auth, params=params,
                                headers=headers)
    # Anything outside the 2xx success set is treated as a failure.
    if response.status_code not in (200, 201, 202, 204):
        logger.error("Request Failed")
        logger.error(
            f"Status: {response.status_code}, Error Response: {response.json()}"
        )
        error_msg = "ServiceNow Request Failed"
        if response.status_code == 401:
            # Fixed typo in the original message: "Credentails".
            error_msg = "ServiceNow Request Failed - Invalid Credentials"
        raise Exception(error_msg)
    response_data = None
    logger.debug("Request executed successfully")
    if response.status_code != 204:
        # 204 No Content has no JSON body to decode.
        response_data = response.json()
        logger.debug(f"Response data: {json.dumps(response_data)}")
    return response_data
def set_record(cf_email, cf_apikey, zone, ip):
    """Point the apex DNS record of *zone* at *ip* via the Cloudflare v4 API.

    Creates the A/AAAA record when none exists; otherwise deletes any
    duplicate records and updates the first existing one in place.
    """
    # Record type follows the IP version.
    record_type = 'A' if ip.version == 4 else 'AAAA'
    # 60s TTL seems common for dyndns providers, but 120s is Cloudflare's
    # minimum; 1 means "automatic".
    ttl = 1
    # Cloudflare v4 API.
    api = 'https://api.cloudflare.com/client/v4/'
    auth_headers = {'Content-Type': 'application/json',
                    'X-Auth-Email': cf_email,
                    'X-Auth-Key': cf_apikey}
    # Resolve the zone name to its Cloudflare zone id.
    zones_resp = requests.get(api + 'zones', headers=auth_headers,
                              params={'status': 'active', 'name': zone})
    zone_id = zones_resp.json()['result'][0]['id']
    # Fetch existing records of this type for the zone apex.
    records_resp = requests.get(api + 'zones/' + zone_id + '/dns_records',
                                headers=auth_headers,
                                params={'type': record_type, 'name': zone})
    entries = records_resp.json()['result']
    payload = {'type': record_type, 'name': zone, 'content': ip.exploded,
               'proxied': False, 'ttl': ttl}
    if not entries:
        # No record yet: create the initial one.
        r = requests.post(api + 'zones/' + zone_id + '/dns_records',
                          headers=auth_headers, json=payload)
    else:
        # Drop any duplicate records beyond the first ...
        for duplicate in entries[1:]:
            requests.delete(api + 'zones/' + zone_id + '/dns_records/' +
                            duplicate['id'], headers=auth_headers)
        # ... then update the surviving record in place.
        r = requests.put(api + 'zones/' + zone_id + '/dns_records/' +
                         entries[0]['id'], headers=auth_headers, json=payload)
def handler(event, context):
    """Replicate DynamoDB stream records into Elasticsearch.

    REMOVE events delete the matching document; every other event upserts
    the record's NewImage. Returns a summary string with the count of
    processed records.
    """
    count = 0
    for record in event["Records"]:
        # The table's primary key doubles as the Elasticsearch document id.
        # Renamed from `id` to avoid shadowing the builtin; the original
        # also used a Python-2-only `print` statement — the parenthesized
        # form below behaves identically on both Python 2 and 3.
        doc_id = record["dynamodb"]["Keys"]["id"]["S"]
        print("bookId " + doc_id)
        if record['eventName'] == 'REMOVE':
            r = requests.delete(url + doc_id, auth=awsauth)
        else:
            document = record['dynamodb']['NewImage']
            r = requests.put(url + doc_id, auth=awsauth, json=document,
                             headers=headers)
        count += 1
    return str(count) + " records processed."
def handle_restart(event):
    """Restart the game by deleting the server-side state and tell the user."""
    resp = requests.delete(URL)
    if resp.status_code == 200:
        message = "Game started !"
    else:
        message = "Error occured"
    return {
        'sessionAttributes': event['sessionAttributes'],
        'dialogAction': {
            'type': 'Close',
            'fulfillmentState': 'Fulfilled',
            'message': {'contentType': 'PlainText', 'content': message},
        },
    }
def deleteInElasticSearch(uid):
    """Delete the unknown-question record identified by *uid*.

    Sends a DELETE to the QnA questions endpoint with a raw JSON body
    describing an Elasticsearch _delete_by_query request, and returns the
    requests Response object.
    """
    # Target URL is the configured endpoint plus the composite question id.
    url = os.environ['QNA_QUESTIONS_ENDPOINT'] + 'UnknownQuestion.' + uid
    # NOTE(review): the "qid" match value below ends with a space before
    # the closing quote (''' + uid + ''' "), so the query actually matches
    # "UnknownQuestion.<uid> " — confirm whether that trailing space is
    # intentional or a bug in the hand-built JSON.
    # NOTE(review): a DELETE request carrying a body is unusual; presumably
    # the receiving service forwards this envelope (endpoint/method/path/body)
    # to Elasticsearch itself — verify against the consumer.
    data = '''{ "endpoint": "search-qnabot-elastic-16x8drir5zf0e-ph2splbwdbo7fglo4was3rho3a.us-east-1.es.amazonaws.com", "method": "POST", "path": "/qnabot/_delete_by_query?refresh=wait_for", "body": { "query": { "match": { "qid": "UnknownQuestion.''' + uid + ''' " } } } }'''
    response = requests.delete(url, data=data)
    return response
def handler(event, context):
    """Mirror DynamoDB stream records into Elasticsearch and report the count."""
    processed = 0
    for record in event["Records"]:
        # A UUID derived from the item's primary key serves as the ES id.
        doc_id = generateUUID(record)
        if record['eventName'] == 'REMOVE':
            r = requests.delete(url + doc_id, auth=awsauth)
        else:
            document = record['dynamodb']['NewImage']
            r = requests.put(url + doc_id, auth=awsauth, json=document,
                             headers=headers)
        processed += 1
    return str(processed) + " records processed."
def handler(event, context):
    """Sync DynamoDB stream records to Elasticsearch (put, or delete on removal)."""
    processed = 0
    for record in event["Records"]:
        # Primary key doubles as the Elasticsearch document id.
        doc_id = record["dynamodb"]["Keys"]["id"]["S"]
        # Parenthesized single-argument print is equivalent under Python 2
        # (where the original bare print statement ran) and Python 3.
        print("bookId " + doc_id)
        try:
            # A NewImage means an insert or update.
            document = record["dynamodb"]["NewImage"]
            r = requests.put(url + doc_id, auth=awsauth, json=document,
                             headers=headers)
            processed += 1
        except KeyError:
            # No NewImage: the record was deleted upstream.
            document = record["dynamodb"]["OldImage"]
            r = requests.delete(url + doc_id, auth=awsauth, json=document,
                                headers=headers)
            processed += 1
    return str(processed) + " records processed."
def lambda_handler(event, context):
    """Delete the attobot branch after its pull request is closed.

    Verifies the GitHub webhook signature, ignores events that are not a
    closed attobot PR, waits briefly when a PR was closed without merging
    (in case it is reopened), then deletes the PR's head branch.
    """
    body_str = base64.b64decode(event["body64"])
    logger.info(body_str)
    if not verify_signature(SECRET, event["signature"], body_str):
        raise Exception('[Unauthorized] Authentication error')
    # https://developer.github.com/v3/activity/events/types/#pullrequestevent
    body = json.loads(body_str)
    if body["action"] != "closed":
        return 'Not a "closed" event'
    if body["pull_request"]["user"]["login"] != "attobot":
        return 'Not an attobot pull request'
    # branch name
    REF = body["pull_request"]["head"]["ref"]
    # if PR has been closed without merging, pause a bit to see if it is reopened
    # e.g. to retrigger Travis if it failed
    if body["pull_request"]["merged_at"] is None:
        PR_URL = body["pull_request"]["url"]
        time.sleep(20)
        # check if branch is still open
        r = requests.get(PR_URL)
        rj = r.json()
        if rj["state"] != "closed":
            return "Pull request has been reopened"
    # delete attobot branch
    # NOTE(review): stdlib urllib.parse.urljoin takes exactly two arguments,
    # so this multi-argument call presumably targets a custom urljoin helper
    # defined elsewhere in the project — verify.
    r = requests.delete(urljoin(GITHUB_API, "repos", BOT_USER, META_NAME,
                                "git/refs/heads", REF),
                        auth=(BOT_USER, BOT_PASS))
    if r.status_code == 204:
        # ("succesfully" typo below is in a returned runtime string; left
        # untouched by this documentation-only pass.)
        return "Branch " + REF + " succesfully deleted."
    else:
        return "Could not delete branch " + REF
def lambda_handler(event, context):
    """Delete Elasticsearch indices older than their configured retention.

    `indices_list_json` maps index-name prefixes to a retention period in
    days. Returns the list of index names that were deleted.
    """
    # Current catalogue of indices with their creation dates.
    all_indices = json.loads(retrieveIndicesAndDates())
    # Reference point for the age computation.
    now = datetime.datetime.now()
    retention_map = json.loads(indices_list_json)
    deleted = []
    for prefix, retention_days in retention_map.items():
        for entry in all_indices:
            # Only consider indices whose name carries this prefix.
            if not entry["index"].startswith(prefix):
                continue
            # Age of the index relative to its recorded creation date.
            age = now - convertDate(entry["creation.date.string"])
            if age.days > retention_days:
                print("Deleting index %s" % (entry["index"]))
                result = requests.delete(esEndPoint + '/' + entry["index"])
                result.raise_for_status()
                deleted.append(entry["index"])
    print("Job completed successfully with %s indices deleted." % len(deleted))
    return deleted
def delete_list_item(event, listId, itemId):
    """Delete one household-list item, provided auth headers are available."""
    request_headers = get_headers(event)
    if not request_headers:
        return
    base = event['context']['System']['apiEndpoint']
    path = '/v2/householdlists/' + listId + '/items/' + itemId
    requests.delete(base + path, headers=request_headers)
def catalog(event, context):
    """ Lambda Function to update pub catalog index at s3 event """
    # Get the object from the event and show its content type
    bucket = event['Records'][0]['s3']['bucket']['name']
    key = urllib.parse.unquote_plus(event['Records'][0]['s3']['object']['key'])
    try:
        # define arguments
        max_rows_disp = "all"  # NOTE(review): never read afterwards.
        max_rows= None  # optional import cap; None means unlimited.
        count = 0
        headers = []  # column names taken from the first row.
        headers_position = {}  # column name -> position in the row.
        to_elastic_string = ""
        datetime_field = ""  # name of the column to format as a datetime.
        id_column = None  # optional column to use as the ES _id.
        # get new db: the pipe-delimited source file from S3.
        response = s3.get_object(Bucket=bucket, Key=key)
        lista = response['Body'].read().split(b'\n')
        headers_post = {"Content-type": "application/json", "Accept": "text/plain"}
        # Data sctructure: template JSON with %column% placeholders built
        # from the index mapping's property names.
        keys =list(indexDoc["mappings"]["catalogo"]['properties'].keys())
        values = [ "%"+str(x)+"%" for x in keys]
        json_struct = str(dict(zip(keys, values))).replace("%',", "%',\n")
        # NOTE(review): this connection is recreated inside the loop below;
        # this first assignment is never used.
        connection = http.client.HTTPConnection(elastic_address)
        # Remove old index
        response = requests.delete("http://" + elastic_address + "/" + elastic_index)
        print("Returned from delete request: ", response)
        # Create index
        response = requests.put("http://" + elastic_address + "/" + elastic_index, data=json.dumps(indexDoc))
        print("Returned from create: ", response)
        for row in lista:
            # Strip quoting artifacts before splitting on the pipe delimiter.
            row = row.decode('utf-8').replace('"','')
            row = row.replace("'[","[").replace("]'","]")
            row = row.replace("}'", '}')# .replace("''","'")
            row = row.split("|")
            if count == 0:
                # First row is the header: record column names and positions.
                for iterator, col in enumerate(row):
                    headers.append(col)
                    headers_position[col] = iterator
            elif max_rows is not None and count >= max_rows:
                print('Max rows imported - exit')
                break
            elif len(row[0]) == 0:
                # Empty rows on the end of document
                print("Found empty rows at the end of document")
                break
            else:
                pos = 0
                # Placeholder quoting differs per platform in the template.
                if os.name == 'nt':
                    _data = json_struct.replace("^", '"')
                else:
                    _data = json_struct.replace("'", '"')
                _data = _data.replace('\n','').replace('\r','')
                # Substitute each %column% placeholder with the row value.
                for header in headers:
                    if header == datetime_field:
                        datetime_type = dateutil.parser.parse(row[pos])
                        _data = _data.replace('"%' + header + '%"',
                                              '"{:%Y-%m-%dT%H:%M}"'.format(datetime_type))
                    else:
                        try:
                            # Integers are inserted unquoted; other types keep
                            # the surrounding quotes from the template.
                            if indexDoc["mappings"]["catalogo"]["properties"][headers[pos]]["type"] == 'integer':
                                _data = _data.replace('"%' + header + '%"', row[pos])
                            else:
                                _data = _data.replace('%' + header + '%', row[pos])
                        except:
                            # NOTE(review): bare except plus yaml.load without an
                            # explicit Loader — unsafe on untrusted input; flagging
                            # only, since this is a documentation-only pass.
                            _data = _data.replace('"%' + header + '%"', str(yaml.load(row[pos])))
                    pos += 1
                # Bulk-API action line, optionally carrying an explicit _id.
                if id_column is not None:
                    index_row = {"index": {"_index": elastic_index, "_type": elastic_type, '_id': row[headers_position[id_column]]}}
                else:
                    index_row = {"index": {"_index": elastic_index, "_type": elastic_type}}
                json_string = json.dumps(index_row) + "\n" + _data + "\n"
                # Normalize null spellings and list quoting in the payload.
                json_string = json_string.replace("'null'",'null').replace('None',"null")
                json_string = json_string.replace('"[','[').replace(']"',"]")
                to_elastic_string = json_string.replace("'",'"').encode('utf-8')
                # BUG(review): full_json_string is never initialized before this
                # augmented assignment, so the first data row raises
                # UnboundLocalError — presumably `full_json_string = b""` is
                # missing from the setup above; confirm and fix separately.
                full_json_string += to_elastic_string
                # One document per request against the bulk endpoint.
                connection = http.client.HTTPConnection(elastic_address)
                # NOTE(review): `endpoint` is not defined in this function —
                # presumably a module-level constant; verify.
                connection.request('POST', url=endpoint, headers = headers_post, body=to_elastic_string)
                response = connection.getresponse()
                print("Returned status code: ", response.status)
                print("Returned status text ", response.read())
                print('Iteración: ' + str(count))
            count += 1
        # Save json result
        today = date.today()
        s3.put_object(Body= full_json_string, Bucket = "serverlesspub", Key = "elastic/pub-monthly-catalog.temporal_{}".format(today))
    except Exception as e:
        raise e
def lambda_handler(event, context):
    """CloudFormation custom-resource handler that creates or deletes an EKS
    cluster by calling the EKS REST API with a hand-rolled SigV4 signature.

    Reads cluster settings from event['ResourceProperties'] and AWS
    credentials from the AWS_ACCESS_KEY / AWS_SECRET_KEY environment
    variables; reports the outcome back via cfnresponse.send.
    """
    response = {
        'StackId': event['StackId'],
        'RequestId': event['RequestId'],
        'LogicalResourceId': event['LogicalResourceId'],
        'Status': 'SUCCESS'
    }
    request_parameters = {
        "clusterName": str(event['ResourceProperties']['ClusterName']),
        "roleArn": str(event['ResourceProperties']['RoleArn']),
        "vpcId": str(event['ResourceProperties']['VPC']),
        "subnets": str(event['ResourceProperties']['SubnetIds']).split(" "),
        "securityGroups": [str(event['ResourceProperties']['SecurityGroups'])]
    }
    # NOTE(review): only Create and Delete are handled; an Update request
    # would leave `method`/`path` undefined and fail below — confirm the
    # stack never sends Update to this resource.
    if event['RequestType'] == 'Create':
        method = 'POST'
        path = '/clusters'
    if event['RequestType'] == 'Delete':
        method = 'DELETE'
        path = '/clusters/' + request_parameters['clusterName']
    request_parameters = json.dumps(request_parameters)
    # NOTE(review): host/endpoint are hard-coded to us-west-2 even though a
    # Region property is read for signing — confirm this is intentional.
    host = 'eks.us-west-2.amazonaws.com'
    region = event['ResourceProperties']['Region']
    service = 'eks'
    endpoint = 'https://eks.us-west-2.amazonaws.com'
    request_url = endpoint + path
    content_type = 'application/json'

    def sign(key, msg):
        # Single HMAC-SHA256 step used while deriving the signing key.
        return hmac.new(key, msg.encode("utf-8"), hashlib.sha256).digest()

    def getSignatureKey(key, date_stamp, regionName, serviceName):
        # AWS SigV4 signing-key derivation chain.
        kDate = sign(('AWS4' + key).encode('utf-8'), date_stamp)
        kRegion = sign(kDate, regionName)
        kService = sign(kRegion, serviceName)
        kSigning = sign(kService, 'aws4_request')
        return kSigning

    t = datetime.datetime.utcnow()
    amz_date = t.strftime('%Y%m%dT%H%M%SZ')
    date_stamp = t.strftime('%Y%m%d')
    canonical_uri = path
    canonical_querystring = ''
    canonical_headers = ('content-type:' + content_type + '\n' + 'host:' +
                         host + '\n' + 'x-amz-date:' + amz_date + '\n')
    signed_headers = 'content-type;host;x-amz-date'
    access_key = os.environ.get('AWS_ACCESS_KEY')
    secret_key = os.environ.get('AWS_SECRET_KEY')
    if access_key is None or secret_key is None:
        print('No access key is available.')
        sys.exit()
    # BUGFIX: hashlib.sha256 requires bytes on Python 3; the original passed
    # the str payload directly, which raises TypeError.
    payload_hash = hashlib.sha256(request_parameters.encode('utf-8')).hexdigest()
    canonical_request = (method + '\n' + canonical_uri + '\n' +
                         canonical_querystring + '\n' + canonical_headers +
                         '\n' + signed_headers + '\n' + payload_hash)
    algorithm = 'AWS4-HMAC-SHA256'
    credential_scope = (date_stamp + '/' + region + '/' + service + '/' +
                        'aws4_request')
    # BUGFIX: same bytes requirement for the canonical-request hash.
    string_to_sign = (algorithm + '\n' + amz_date + '\n' + credential_scope +
                      '\n' +
                      hashlib.sha256(canonical_request.encode('utf-8')).hexdigest())
    signing_key = getSignatureKey(secret_key, date_stamp, region, service)
    signature = hmac.new(signing_key, (string_to_sign).encode('utf-8'),
                         hashlib.sha256).hexdigest()
    authorization_header = (algorithm + ' ' + 'Credential=' + access_key +
                            '/' + credential_scope + ', ' + 'SignedHeaders=' +
                            signed_headers + ', ' + 'Signature=' + signature)
    headers = {
        'Content-Type': content_type,
        'Host': host,
        'X-Amz-Date': amz_date,
        'Authorization': authorization_header
    }
    try:
        if event['RequestType'] == 'Create':
            r = requests.post(request_url, data=request_parameters,
                              headers=headers)
        if event['RequestType'] == 'Delete':
            r = requests.delete(request_url, data=request_parameters,
                                headers=headers)
        responseData = json.loads(r.text)
        print('Response code: %d' % r.status_code)
        return cfnresponse.send(event, context, cfnresponse.SUCCESS,
                                responseData, "CustomResourcePhysicalID")
    except Exception:
        response[
            'Reason'] = 'Event Failed - See CloudWatch logs for the Lamba function backing the custom resource for details'
        # BUGFIX: the original referenced `responseData` here, which is
        # undefined when the request itself failed (NameError would mask
        # the real error); send an empty payload instead.
        return cfnresponse.send(event, context, cfnresponse.FAILED, {},
                                "CustomResourcePhysicalID")
def label_remove(repo_owner, repo_name, label):
    """Delete *label* from the given GitHub repository and return the response."""
    target = (api_url + "/repos/" + repo_owner + "/" + repo_name +
              "/labels/" + label)
    auth_headers = {'Authorization': 'Basic ' + token}
    return requests.delete(target, headers=auth_headers)
def lambda_handler(event, context): # Replace this with your firebase project firebaseProject = "https://analytics007-7d4bc.firebaseio.com/" url = firebaseProject+"/temp.json" token = "team7_living" # If a userID was passed in via the URL, use that userID. queryStringParameters = event.get('queryStringParameters',{}) print(queryStringParameters) if queryStringParameters and "uid" in queryStringParameters and "role" in queryStringParameters: print("here") newUser = queryStringParameters['uid'] roleDeclared = queryStringParameters['role'] print(newUser) print(roleDeclared) # This is a list of all the current api options apiOptions = ["users","courseMembers","cohortCourses","cohorts","courses","solutions"] for filename in apiOptions: download_file(token,filename) userName = returnName(newUser, "/tmp/users.json", "/tmp/cohortCourses.json", "/tmp/cohorts.json") userNameRoute = firebaseProject+"/userName.json" requests.delete(url=userNameRoute) newLogin = {"name": userName} nameUrl = firebaseProject+"/userName.json" requests.put(url=nameUrl, data=json.dumps(newLogin)) # Getting userAchievements data from firebase resp = requests.get("https://analytics-project-89af3.firebaseio.com/userAchievements.json") userAchievements = json.loads(resp.text) """Instructor's output""" # Array to store all schools all_sch = flatten_cohortCourses("/tmp/courseMembers.json", "/tmp/users.json", "/tmp/cohortCourses.json") # Dictionary of all admins all_admins = get_admin("/tmp/cohorts.json") # Array of all students registered all_students = [] # Array of students with no name registered users_with_no_name = [] create_student_array("/tmp/courseMembers.json", all_students) add_name_to_student("/tmp/users.json", all_students, users_with_no_name) add_school_to_student(all_students, all_sch) add_achievements_lastactive_to_student(all_students, userAchievements) # Dictionary of students sorted by school student_dict = sort_all_students_by_sch(all_students, all_sch) 
add_cohort_id_to_student("/tmp/cohortCourses.json", student_dict) # Dictionary of cohorts as key and values are dictionary of levels that store 5 number summary as values cohortstats = {"-L60lMb7QBWxMsd0aIHE":{}, "-L60nbB_SwQIdvjEEIp7":{}, "-L60ng0QZnIUq5u3w3kU":{}} levelstats = {"-L60lMb7QBWxMsd0aIHE":{}, "-L60nbB_SwQIdvjEEIp7":{}, "-L60ng0QZnIUq5u3w3kU":{}} # Dictionary of cohorts as key and values are list of 20 levels (which is also a list) that stores every student's playtime in that level overallstats = {} # Getting userAchievements data from firebase resp2 = requests.get("https://analytics007-7d4bc.firebaseio.com/allEvents.json") allEvents = json.loads(resp2.text) achievements_dict = {} update_all_logs(allEvents, achievements_dict) update_five_number_summary(cohortstats, overallstats, all_students, achievements_dict) sub_function_for_studentCC_status(levelstats, all_students, achievements_dict) # Getting liveData from firebase resp3 = requests.get("https://analytics007-7d4bc.firebaseio.com/liveData.json") liveData = json.loads(resp3.text) # Getting predicted timings resp4 = requests.get("https://analytics007-7d4bc.firebaseio.com/predictedlevels.json") predictedTimings = json.loads(resp4.text) # Push and update firebase user ids, for authentication when user logins all_admin_id = [] for i in all_admins: for identity in all_admins[i][1]: if identity not in all_admin_id: all_admin_id.append(identity) all_student_id = [student.get_uid() for student in all_students] all_school_id = [] for student in all_students: if (student.get_sch_id() not in all_school_id): all_school_id.append(student.get_sch_id()) instructor_authenticateUrl = firebaseProject+"/authentication/instructorID.json" student_authenticateUrl = firebaseProject+"/authentication/studentID.json" requests.put(url=instructor_authenticateUrl, data=json.dumps(all_school_id)) requests.put(url=student_authenticateUrl, data=json.dumps(all_student_id)) print("user!") print(newUser) print("role!") 
print(roleDeclared) print(type(roleDeclared)) result = {} # Checking for identity of administrator if ((str(roleDeclared) == '1') and (newUser in all_admin_id)): sign_in_data = get_signin_number(allEvents, "/tmp/courseMembers.json", "/tmp/cohortCourses.json", "/tmp/cohorts.json") sign_up_data = get_signup_number(allEvents, "/tmp/courseMembers.json", "/tmp/cohortCourses.json", "/tmp/cohorts.json") total_no_school = get_total_no_school("/tmp/cohorts.json", "/tmp/cohortCourses.json") total_students_per_sch = get_total_students_per_sch("/tmp/cohorts.json", "/tmp/cohortCourses.json", "/tmp/courses.json", "/tmp/courseMembers.json") weekly_submits = weeklysubmissions(allEvents, "/tmp/courseMembers.json", "/tmp/cohortCourses.json", "/tmp/cohorts.json") instructor_activity = getinstructoractivity(allEvents, "/tmp/courseMembers.json", "/tmp/users.json") inactive_schools = inactiveschools(allEvents, "/tmp/courseMembers.json", "/tmp/cohortCourses.json", "/tmp/cohorts.json") instructor_contacts = admin_contact("/tmp/cohorts.json", "/tmp/cohortCourses.json", "/tmp/courses.json") schoolsInstructorRoute = firebaseProject+"/adminInfo/schoolsInstructor.json" requests.delete(url=schoolsInstructorRoute) for cohort in instructor_contacts: schCount = 1 for sch in instructor_contacts[cohort]: newSchool = {"school": sch, "contactNumber": instructor_contacts[cohort][sch]['contact no.'], "instructorName": instructor_contacts[cohort][sch]['instructor'], "email": instructor_contacts[cohort][sch]['email']} newUrl = firebaseProject+"/adminInfo/schoolsInstructor/"+cohort+"/"+str(schCount)+".json" requests.put(url=newUrl, data=json.dumps(newSchool)) schCount+=1 inactiveSchoolsRoute = firebaseProject+"/adminInfo/inactiveSchools.json" requests.delete(url=inactiveSchoolsRoute) for cohort in inactive_schools: schCount = 1 for sch in inactive_schools[cohort]: newSchool = {"school": sch} newUrl = firebaseProject+"/adminInfo/inactiveSchools/"+cohort+"/"+str(schCount)+".json" requests.put(url=newUrl, 
data=json.dumps(newSchool)) schCount+=1 weeklyPostingRoute = firebaseProject+"/adminInfo/instructorActivity.json" requests.delete(url=weeklyPostingRoute) for instruct in instructor_activity: for wk in instructor_activity[instruct]: newPosting = {"postingCount": instructor_activity[instruct][wk]} newUrl = firebaseProject+"/adminInfo/instructorActivity/"+instruct+"/"+str(wk)+".json" requests.put(url=newUrl, data=json.dumps(newPosting)) weeklySubmitsRoute = firebaseProject+"/adminInfo/weeklySubmits.json" requests.delete(url=weeklySubmitsRoute) for cohort in weekly_submits: for week in weekly_submits[cohort]: newWeekly = {"submissionsCount": weekly_submits[cohort][week]} newUrl = firebaseProject+"/adminInfo/weeklySubmits/"+cohort+"/"+str(week)+".json" requests.put(url=newUrl, data=json.dumps(newWeekly)) totalSchoolsRoute = firebaseProject+"/adminInfo/staticInfo.json" requests.delete(url=totalSchoolsRoute) allSchoolsStatic = {"totalCount": total_no_school[0], "primaryCount": total_no_school[1]['2018 National Coding Championships - Primary'], "juniorCount": total_no_school[1]['2018 National Coding Championships - Junior'], "seniorCount": total_no_school[1]['2018 National Coding Championships - Senior']} newUrl = firebaseProject+"/adminInfo/staticInfo.json" requests.put(url=newUrl, data=json.dumps(allSchoolsStatic)) studentsPerSchoolRoute = firebaseProject+"/adminInfo/studentsPerSchool.json" requests.delete(url=studentsPerSchoolRoute) for cohort in total_students_per_sch: schoolInCohortCount = 1 for school in total_students_per_sch[cohort]: newSchoolData = {"schoolName": school, "studentCount": total_students_per_sch[cohort][school]} newUrl = firebaseProject+"/adminInfo/studentsPerSchool/"+cohort+"/"+str(schoolInCohortCount)+".json" requests.put(url=newUrl, data=json.dumps(newSchoolData)) schoolInCohortCount+=1 signupRateRoute = firebaseProject+"/adminInfo/signUpRate.json" requests.delete(url=signupRateRoute) for cohort in sign_up_data: for sch in sign_up_data[cohort]: 
signupCount = 1 for week in sign_up_data[cohort][sch]: newSignUp = {"week": week, "signups": sign_up_data[cohort][sch][week]} newUrl = firebaseProject+"/adminInfo/signUpRate/"+cohort+"/"+sch+"/"+str(signupCount)+".json" requests.put(url=newUrl, data=json.dumps(newSignUp)) signupCount+=1 signinRateRoute = firebaseProject+"/adminInfo/signInRate.json" requests.delete(url=signinRateRoute) for cohort in sign_in_data: for sch in sign_in_data[cohort]: signinCount = 1 for week in sign_in_data[cohort][sch]: newSignIn = {"week": week, "signins": sign_in_data[cohort][sch][week]} newUrl = firebaseProject+"/adminInfo/signInRate/"+cohort+"/"+sch+"/"+str(signinCount)+".json" requests.put(url=newUrl, data=json.dumps(newSignIn)) signinCount+=1 # Get data using python requests resp = requests.get(url=url) data = json.loads(resp.text) print("Data after update", data) result = { "isBase64Encoded": False, "statusCode": 200, "headers": {}, "body": json.dumps({"message": "Successfully updated administrator info"}) } # Checking for identity of instructor elif ((str(roleDeclared) == '2') and (newUser in all_school_id)): print("entered here") student_info = get_student_ingame_info(newUser, "/tmp/courseMembers.json", "/tmp/users.json", userAchievements) schools_perf = get_all_schools_performance(newUser, student_info, "/tmp/cohortCourses.json", "/tmp/courseMembers.json", "/tmp/users.json", userAchievements) student_overall_perf = get_student_overall_performance(newUser, "/tmp/courseMembers.json", "/tmp/users.json", allEvents) schools_time_level_correlation = getAverageTimeLevels(newUser, "/tmp/courseMembers.json", "/tmp/cohortCourses.json", liveData, "/tmp/users.json") videoIDs = ["-L8PEzVB0fRyhmQAOBx1", "-L8Gz-q54aVyOc3icUgO", "-L8H-0i0w2y8TTccE6T2", "-L8H-we7JsrUikMrESh5", "-L8H0WhmBwqjY3FHlkv8"] videoPauseStats = [{video: getPauseStats(video, "/tmp/solutions.json")} for video in videoIDs] top3_failed_levels = topthreeflaggedlevels(newUser, "/tmp/courseMembers.json", "/tmp/users.json", 
achievements_dict, overallstats, all_students) weaker_students = weakerstudents(newUser, "/tmp/courseMembers.json", "/tmp/users.json", overallstats, all_students, achievements_dict) #video_status = getvideoStats(newUser, "/tmp/users.json", "/tmp/solutions.json") student_count = numberOfStudentsInSchool(newUser, "/tmp/cohortCourses.json") studentCountRoute = firebaseProject+"/instructorInfo/studentCount.json" requests.delete(url=studentCountRoute) currentCount = {"count": student_count} newUrl = firebaseProject+"/instructorInfo/studentCount.json" requests.put(url=newUrl, data=json.dumps(currentCount)) """videoStatusRoute = firebaseProject+"/instructorInfo/videoStatus.json" requests.delete(url=videoStatusRoute) videoCount = 1 # Adding into firebase for vid in video_status: newVideo = {"videoName": video_status[vid]['videoname'], "flaggedStudents": video_status[vid]['studentnames']} newUrl = firebaseProject+"/instructorInfo/videoStatus/"+str(videoCount)+".json" requests.put(url=newUrl, data=json.dumps(newVideo)) videoCount+=1""" weakerStudentsRoute = firebaseProject+"/instructorInfo/weakerStudents.json" requests.delete(url=weakerStudentsRoute) weakerStudentCount = 1 # Adding into firebase for student in weaker_students: newWeakerStudent = {"studentName": student, "failedCount": weaker_students[student]['failed'], "failedLevels": weaker_students[student]['failed levels']} newUrl = firebaseProject+"/instructorInfo/weakerStudents/"+str(weakerStudentCount)+".json" requests.put(url=newUrl, data=json.dumps(newWeakerStudent)) weakerStudentCount+=1 mostFailedRoute = firebaseProject+"/instructorInfo/mostFailed.json" requests.delete(url=mostFailedRoute) topFailedCount = 1 # Adding into firebase for level in range(len(top3_failed_levels)): newFailedLevel = {"levelName": top3_failed_levels[level]['level'], "failedCount": top3_failed_levels[level]['No. 
students failed'], "topic": top3_failed_levels[level]['topic']} newUrl = firebaseProject+"/instructorInfo/mostFailed/"+str(topFailedCount)+".json" requests.put(url=newUrl, data=json.dumps(newFailedLevel)) topFailedCount+=1 # Getting students only from selected school (user input) student_info_by_user = student_dict[newUser] # Deleting previous records studentTableRoute = firebaseProject+"/instructorInfo/studentTable.json" requests.delete(url=studentTableRoute) count = 1 # Adding into firebase for student in student_info_by_user: newStudent = {"studentNumber": count, "studentName": student.get_name(), "studentID": student.get_uid(), "studentSchool": student.get_school(), "studentCohort": student.get_cohort_id(), "studentPhotoLink": student.get_photo(), "studentCompleted": student.get_achievements(), "studentLastActive": student.get_lastactive()} newUrl = firebaseProject+"/instructorInfo/studentTable/"+str(count)+".json" requests.put(url=newUrl, data=json.dumps(newStudent)) count+=1 schoolCurrentProgressRoute = firebaseProject+"/instructorInfo/schoolProgress.json" requests.delete(url=schoolCurrentProgressRoute) schoolProgressCount = 1 for school in schools_time_level_correlation: newSchoolProgress = {"schoolName": school, "schoolTiming": schools_time_level_correlation[school]['avgTimeSpent'], "schoolLevels": schools_time_level_correlation[school]['avgLevels']} newUrl = firebaseProject+"/instructorInfo/schoolProgress/"+str(schoolProgressCount)+".json" requests.put(url=newUrl, data=json.dumps(newSchoolProgress)) schoolProgressCount+=1 studentCurrentProgressRoute = firebaseProject+"/instructorInfo/studentProgress.json" requests.delete(url=studentCurrentProgressRoute) progressCount = 1 for student in student_info: newStudentProgress = {"studentName": student_info[student]['name'], "studentLevel": student_info[student]['level'], "studentGameID": student_info[student]['gameid']} newUrl = firebaseProject+"/instructorInfo/studentProgress/"+str(progressCount)+".json" 
requests.put(url=newUrl, data=json.dumps(newStudentProgress)) progressCount+=1 studentOverallPerfRoute = firebaseProject+"/instructorInfo/studentOverall.json" requests.delete(url=studentOverallPerfRoute) overallCount = 1 for student in student_overall_perf: newStudentOverall = {"studentName": student_overall_perf[student]['name'], "studentLevel": student_overall_perf[student]['completed levels'], "studentGameID": student_overall_perf[student]['gameid'], "averageTiming": student_overall_perf[student]['time per level'], "totalTimeSpent": student_overall_perf[student]['total time']} newUrl = firebaseProject+"/instructorInfo/studentOverall/"+str(overallCount)+".json" requests.put(url=newUrl, data=json.dumps(newStudentOverall)) overallCount+=1 schoolPerformanceRoute = firebaseProject+"/instructorInfo/schoolPerformance.json" requests.delete(url=schoolPerformanceRoute) schoolCount = 1 for sch in schools_perf: newSchool = {"schoolID": sch, "schoolName": schools_perf[sch]['name'], "schoolAverageTime": schools_perf[sch]['avgtime']} newUrl = firebaseProject+"/instructorInfo/schoolPerformance/"+str(schoolCount)+".json" requests.put(url=newUrl, data=json.dumps(newSchool)) schoolCount+=1 videoPauseRoute = firebaseProject+"/instructorInfo/videoPause.json" requests.delete(url=videoPauseRoute) # videoPauseStats is a list of dictionaries for video in range(len(videoPauseStats)): # videoPauseStats[video] is a dictionary for key in videoPauseStats[video]: pauseCount = 1 for interval in videoPauseStats[video][key]: newPauseInterval = {"videoInterval": interval, "pauseCount": videoPauseStats[video][key][interval]} newUrl = firebaseProject+"/instructorInfo/videoPause/"+key+"/"+str(pauseCount)+".json" requests.put(url=newUrl, data=json.dumps(newPauseInterval)) pauseCount+=1 # Get data using python requests resp = requests.get(url=url) data = json.loads(resp.text) print("Data after update", data) result = { "isBase64Encoded": False, "statusCode": 200, "headers": {}, "body": 
json.dumps({"message": "Successfully updated instructor info"}) } # Checking for identity of student elif ((str(roleDeclared) == '3') and (newUser in all_student_id)): # Getting latest articles, unable to work due to inability to import BeautifulSoup """ url_1 = 'https://medium.com/topic/javascript' url_2 = 'https://medium.com/topic/technology' js_articles = get_website_element(url_1) jsArticlesRoute = firebaseProject+"/studentInfo/articles/js.json" requests.delete(url=jsArticlesRoute) jsCount = 1 for article in range(3): newJsArticle = {"title": js_articles[article][0], "url": js_articles[article][1], "summary": js_articles[article][2]} newUrl = firebaseProject+"/studentInfo/articles/js/"+str(jsCount)+".json" requests.put(url=newUrl, data=json.dumps(newJsArticle)) jsCount+=1 """ # Getting dictionary with level name, level no. and predicted time personalPrediction = predictedTimings[newUser] nextLevelPredictionRoute = firebaseProject+"/studentInfo/predictedTimings.json" requests.delete(url=nextLevelPredictionRoute) newPrediction = {"levelName": personalPrediction['level name'], "levelNumber": personalPrediction['next level'], "predictedTime": personalPrediction['predicted time']} newUrl = firebaseProject+"/studentInfo/predictedTimings.json" requests.put(url=newUrl, data=json.dumps(newPrediction)) personalProgress = student_percentile_all_levels(newUser, levelstats, all_students, achievements_dict) personalProgressRoute = firebaseProject+"/studentInfo/personalProgress.json" requests.delete(url=personalProgressRoute) for level in personalProgress: newProgress = {"levelName": level, "timeTaken": personalProgress[level]['time taken'], "percentile": personalProgress[level]['percentile']} newUrl = firebaseProject+"/studentInfo/personalProgress/"+level+".json" requests.put(url=newUrl, data=json.dumps(newProgress)) levelCount = 1 # Gets all levels a student has completed, with time taken and date all_completed_levels_stat = get_studentAchieve(newUser, achievements_dict) 
studentCodeCombatRoute = firebaseProject+"/studentInfo/codeCombat.json" requests.delete(url=studentCodeCombatRoute) for level in all_completed_levels_stat: for key in all_completed_levels_stat[level]: newLevel = {"LevelName": level, "CompletedDate": key, "timeTaken": all_completed_levels_stat[level][key][0], "clearedStatus": all_completed_levels_stat[level][key][1]} newUrl = firebaseProject+"/studentInfo/codeCombat/"+str(levelCount)+".json" requests.put(url=newUrl, data=json.dumps(newLevel)) levelCount+=1 # Getting student's 20 levels performance in relation to his/her peers student_cohort = get_cohort(newUser, all_students) student_timings = get_student_20_level_performance(newUser, achievements_dict, all_students, cohortstats) studentPerformanceRoute = firebaseProject+"/studentInfo/levelTimings.json" requests.delete(url=studentPerformanceRoute) timingLevelCount = 1 for level in student_timings: if ('self' in student_timings[level]): newLevelTiming = {"levelNumber": level, "25thPercentile": student_timings[level]['25th'], "75thPercentile": student_timings[level]['75th'], "maxTiming": student_timings[level]['max'], "minTiming": student_timings[level]['min'], "medianTiming": student_timings[level]['median'], "personalTiming": student_timings[level]['self']} else: newLevelTiming = {"levelNumber": level, "25thPercentile": student_timings[level]['25th'], "75thPercentile": student_timings[level]['75th'], "maxTiming": student_timings[level]['max'], "minTiming": student_timings[level]['min'], "medianTiming": student_timings[level]['median'], "personalTiming": "No attempts yet"} newUrl = firebaseProject+"/studentInfo/levelTimings/"+str(timingLevelCount)+".json" requests.put(url=newUrl, data=json.dumps(newLevelTiming)) timingLevelCount+=1 # Getting student past performance for 20 levels pastPerformance = get_pastPerformance(newUser, achievements_dict, overallstats, all_students) pastPerformanceRoute = firebaseProject+"/studentInfo/pastPerformance.json" 
requests.delete(url=pastPerformanceRoute) performanceLevelCount = 1 for level in pastPerformance: pastPerformanceTiming = {"levelNumber": level, "timeTaken": pastPerformance[level]} newUrl = firebaseProject+"/studentInfo/pastPerformance/"+str(performanceLevelCount)+".json" requests.put(url=newUrl, data=json.dumps(pastPerformanceTiming)) performanceLevelCount+=1 # Getting student's flagged levels flaggedLevels = student_flagged_levels(newUser, achievements_dict, overallstats, all_students) flaggedLevelsRoute = firebaseProject+"/studentInfo/flaggedLevels.json" requests.delete(url=flaggedLevelsRoute) flaggedLevelsCount = 1 for level in range(len(flaggedLevels)): newFlaggedLevel = {"levelNumber": flaggedLevels[level]['level'], "levelName": flaggedLevels[level]['name'], "levelPercentile": flaggedLevels[level]['percentile'], "levelTopic": flaggedLevels[level]['topic'], "enrichmentLink": flaggedLevels[level]['w3 link']} newUrl = firebaseProject+"/studentInfo/flaggedLevels/"+str(flaggedLevelsCount)+".json" requests.put(url=newUrl, data=json.dumps(newFlaggedLevel)) flaggedLevelsCount+=1 # Getting number of levels a student has completed per week, compared to his/her peers completedLevelStats = get_completed_levels_stats(newUser, allEvents, all_students) completedLevelStatsRoute = firebaseProject+"/studentInfo/completedLevelStats.json" requests.delete(url=completedLevelStatsRoute) completedCount = 1 for week in completedLevelStats: newCompletedLevel = {"week": week, "cohortCompleted": completedLevelStats[week]['cohort'], "personalCompleted": completedLevelStats[week]['self']} newUrl = firebaseProject+"/studentInfo/completedLevelStats/"+str(completedCount)+".json" requests.put(url=newUrl, data=json.dumps(newCompletedLevel)) completedCount+=1 # Getting static info for student achievementPercentile = achievement_percentile(newUser, userAchievements, all_students) videoUnwatchedCount = getUnwatchedCount(newUser, "/tmp/solutions.json") print(videoUnwatchedCount) 
achievementPercentileRoute = firebaseProject+"/studentInfo/staticInfo.json" requests.delete(url=achievementPercentileRoute) statCount = 1 currentStats = {"Cohort average": achievementPercentile['Cohort average'], "Current level": achievementPercentile['Current level'], "Percentile in cohort": achievementPercentile['Percentile in cohort'], "Unwatched Video": videoUnwatchedCount} statsUrl = firebaseProject+"/studentInfo/staticInfo/"+str(statCount)+".json" requests.put(url=statsUrl, data=json.dumps(currentStats)) # Get data using python requests resp = requests.get(url=url) data = json.loads(resp.text) print("Data after update", data) result = { "isBase64Encoded": False, "statusCode": 200, "headers": {}, "body": json.dumps({"message": "Successfully updated student info"}) } """elif ((newUser not in all_student_id) and (newUser not in all_school_id) and (newUser not in all_admin_id)): result = { "isBase64Encoded": False, "statusCode": 502, "headers": "no such user", "body": json.dumps({"message": "Incorrect credentials, no such user!"}) }""" return result
def make_delete(url):
    """Issue an HTTP DELETE against the Okta API.

    :param url: path joined onto the OKTA_URL base via urljoin
    :return: the requests.Response from the DELETE call
    """
    # NOTE(review): assumes OKTA_URL is set in the environment — urljoin
    # would receive None otherwise; confirm deployment configuration.
    target = urljoin(os.getenv('OKTA_URL'), url)
    return requests.delete(target, headers=HEADERS)
def delete(self, url, headers):
    """Perform an HTTP DELETE and run the shared response check.

    :param url: target URL for the DELETE request
    :param headers: headers dict forwarded to requests
    :return: result of __check_resp__ applied to the response
    """
    # Lazy %-args: logging formats only if DEBUG is enabled.
    logger.debug("DELETE Requests:\nurl=%s", url)
    response = requests.delete(url, headers=headers)
    logger.debug("Response: %s", response.text)
    return __check_resp__(response)
def delete_dashboard(dashboard_id, org, project, api_key):
    """Delete a metric dashboard via the Lightstep public API.

    :param dashboard_id: id of the dashboard to remove
    :param org: Lightstep organization name
    :param project: project within the organization
    :param api_key: bearer token used for authorization
    :return: the requests.Response from the DELETE call
    """
    endpoint = (
        'https://api.lightstep.com/public/v0.2/'
        '%s/projects/%s/metric_dashboards/%s' % (org, project, dashboard_id)
    )
    auth_header = {'Authorization': 'Bearer %s' % api_key}
    return requests.delete(endpoint, headers=auth_header)
def handler(event, context):
    """CloudFormation custom-resource handler.

    On Delete it removes the target record from the 'waflab' DynamoDB table
    through the authenticated API endpoint; on Create/Update it writes a new
    record carrying a random uuid and a 12-hour TTL. The outcome is always
    reported back to CloudFormation via cfnresponse.send.

    :param event: CloudFormation custom-resource event (reads 'RequestType')
    :param context: Lambda context, forwarded to cfnresponse.send
    """
    url = APIEP
    AuthToken = encode(UserName, PassWord)
    timestamp = int(time.time())
    headers = {
        'content-type': 'application/json',
        'authorizationToken': AuthToken
    }
    logger.info('Received event: {}'.format(json.dumps(event)))
    responseData = {}
    # Assume failure unless the branch below proves otherwise.
    responseStatus = cfnresponse.FAILED
    try:
        if event['RequestType'] == 'Delete':
            payload = {"Key": {"targid": {"S": TargId}}, "TableName": "waflab"}
            response = requests.delete(url,
                                       data=json.dumps(payload),
                                       headers=headers)
            responseData = {'StatusCode': response.status_code}
            responseStatus = cfnresponse.SUCCESS
        elif event['RequestType'] in ('Create', 'Update'):
            # 8-character lowercase-alphanumeric id. The original used
            # randint(8, 8), which always yields 8 — simplified here.
            # NOTE(review): 'choice' comes from random, not secrets; fine for
            # a lab identifier, not for security-sensitive tokens.
            allchar = string.ascii_lowercase + string.digits
            UUID = "".join(choice(allchar) for _ in range(8))
            # Record expires 12 hours from now (DynamoDB TTL). The original
            # expression carried a stray unary plus ('+ +12 * 60 * 60');
            # the value is unchanged.
            EXP = timestamp + 12 * 60 * 60
            payload = {
                "Item": {
                    "targid": {"S": TargId},
                    "uuid": {"S": UUID},
                    "stackid": {"S": StackId},
                    "timestamp": {"N": str(timestamp)},
                    "ttl": {"N": str(EXP)}
                },
                "TableName": "waflab"
            }
            response = requests.post(url,
                                     data=json.dumps(payload),
                                     headers=headers)
            logger.info('Response Text: {}'.format(response.text))
            responseData = {'UUID': UUID}
            # The API responds with a 'deny' body when the request is rejected.
            if 'deny' not in response.text:
                responseStatus = cfnresponse.SUCCESS
            else:
                responseStatus = cfnresponse.FAILED
    except ClientError as e:
        logger.error('Error: {}'.format(e))
        responseStatus = cfnresponse.FAILED
    except Exception as e:
        # BUG FIX: previously only ClientError was caught, so any other
        # failure (a requests exception, a missing event key, ...) escaped
        # the handler and cfnresponse.send never ran — leaving the
        # CloudFormation stack hanging until its timeout. Report FAILED
        # instead so the stack operation completes promptly.
        logger.error('Unhandled error: {}'.format(e))
        responseStatus = cfnresponse.FAILED
    # Log the response status
    logger.info('Returning response status of: {}'.format(responseStatus))
    # Send result to stack
    cfnresponse.send(event, context, responseStatus, responseData)
def clearIndices(host='https://search-photos-khcjss3c77o2erqokp2pi6dvea.us-east-1.es.amazonaws.com/photos/'):
    """Delete the 'photos' index from the Elasticsearch domain.

    Generalized: the previously hard-coded endpoint is now a defaulted
    parameter, so callers can target another index/domain while existing
    zero-argument calls behave exactly as before.

    :param host: full URL of the index to delete
    :return: the parsed JSON body of the Elasticsearch delete response
    """
    response = requests.delete(host)
    # response.json() is equivalent to json.loads(content.decode('utf-8'))
    # for the UTF-8 JSON bodies Elasticsearch returns.
    return response.json()