def check_status(path, media_type):
    """Check whether a media resource is reachable/playable.

    path: URL of the media (HLS stream or YouTube watch URL).
    media_type: 'hls', 'youtube', or anything else (assumed alive).
    Returns 1 when the resource appears available, 0 otherwise.
    """
    if media_type == 'hls':
        try:
            r = requests.head(path, timeout=5)
            # Some servers reject HEAD with 405 even though the stream
            # is fine, so treat 405 as alive alongside 200.
            return 1 if r.status_code in (200, 405) else 0
        except requests.RequestException:
            # Timeout / connection failure: report the stream as down
            # (narrowed from a blanket `except Exception`).
            return 0
    elif media_type == 'youtube':
        vid_id = video_id(path)
        api_constr = (
            "https://www.googleapis.com/youtube/v3/videos"
            "?part=contentDetails&id=" + vid_id
            + "&key=" + def_youtube_api_key
        )
        restr = get_youtube_restriction(api_constr)
        if restr == 'nores':
            # No regional restriction at all.
            return 1
        if restr == 'dead':
            # Video removed or otherwise unavailable.
            return 0
        # Otherwise restr is a collection of blocked country codes;
        # `country` is a module-level setting.
        return 0 if country in restr else 1
    else:
        # Unknown media types are assumed alive.
        return 1
def skip_reload_same_etag(resource_type, cld_type, cld_public_id, event_etag):
    """
    Identify an attempt to re-upload an object with identical content

    Make sure to read all the gotchas in the README.md before enabling it

    Returns True when a HEAD of the delivery URL succeeds and its ETag
    matches event_etag, meaning the upload can be skipped.
    """
    # Random version component is cache busting: avoids a cached ETAG
    # for short-cycle re-uploads of the same public id.
    url = "{}/{}/{}/v{}/{}".format(
        environ["cld_delivery_url"],
        resource_type,
        cld_type,
        random.randint(2, 9999999999),
        cld_public_id)
    head_result = requests.head(url)
    # BUG FIX: the original compared the header against the literal
    # string ' + event_etag + ' (a stray triple-quote template
    # artifact), so the match could never succeed.  Compare the actual
    # etag values; HTTP ETag header values are usually wrapped in
    # double quotes (RFC 7232), so strip them on both sides.
    return (head_result.status_code < 400
            and "etag" in head_result.headers
            and head_result.headers["etag"].strip('"') == event_etag.strip('"'))
def lambda_handler(event, context):
    """AWS Lambda entry point: publish an SNS alert when the site is down.

    HEADs the module-level websiteURL; anything other than HTTP 200
    triggers a 'Website Offline' message to topicArnCode.
    """
    r = requests.head(websiteURL)
    if r.status_code == 200:
        # BUG FIX: the original used a Python-2 print statement
        # (`print "..."`), which is a SyntaxError on Python 3.
        print("Website Is Alive!")
    else:
        sns = boto3.client('sns')
        sns.publish(
            TopicArn=topicArnCode,
            Subject='Website Offline',
            Message='Status code 200 was expected but returned was '
                    + str(r.status_code)
        )
def get_recording_size(event, context):
    """ Get the size of the video file

    HEADs event['url'] and merges {"size", "is_completed"} into the
    event.  size is -1 when the request fails or the server does not
    report a length.
    """
    r = requests.head(event['url'])
    if r.status_code == 200:
        # Content-Length may be absent (e.g. chunked transfer); fall
        # back to -1 instead of raising KeyError as the original did.
        size = int(r.headers.get('Content-Length', -1))
    else:
        size = -1
    return merge_two_dicts(event, {"size": size, "is_completed": False})
def CheckTargets(Targets):
    """Probe each ADDRESS:PORT target over HTTP/HTTPS.

    Targets: comma-separated ADDRESS:PORT entries; only ports 80
    (http) and 443 (https) are supported, others are skipped.
    Returns True when at least one target accepted a connection.
    """
    global Logger, ConnectionTimeout
    TargetStates = []
    Schemas = {'80': 'http', '443': 'https'}
    TargetList = Targets.split(',')
    Logger.debug('Working with %d targets' % len(TargetList))
    for Target in TargetList:
        Logger.debug('Checking %s' % Target)
        try:
            (Address, Port) = Target.split(':')
        except ValueError:
            # Narrowed from a bare `except:` — only a malformed entry
            # (wrong number of ':' separators) should be skipped.
            Logger.error('Failed to extract ADDRESS:PORT from %s' % Target)
            continue
        if Port not in Schemas:
            Logger.error(
                'Port not listed in schemas for this to work - ignoring %s' % Target)
            continue
        Logger.info('Connecting to %s://%s' % (Schemas[Port], Address))
        try:
            requests.head(Schemas[Port] + '://' + Address,
                          timeout=ConnectionTimeout)
            # We actually don't care what the response is
            Logger.info('Connect succeeded')
            TargetStates.append(True)
        except Exception as e:
            Logger.info('Connect failed: %s' % str(e))
            TargetStates.append(False)
    return any(TargetStates)
def configure_embargoed_countries_bucket(oring_bucket, embargoed_countries_bucket, embargoed_countries_key, countries_parser_arn):
    """Wire the embargoed-countries bucket to its parser Lambda and seed it.

    Sets an ObjectCreated notification on embargoed_countries_bucket that
    invokes countries_parser_arn, then copies the embargoed-countries file
    from oring_bucket (over HTTPS) into the bucket, which triggers the
    parser on the fresh upload.
    """
    log = logging.getLogger()
    log.debug("configure_embargoed_countries_bucket - Start")
    log.debug("oring_bucket: %s" % oring_bucket)
    log.debug("embargoed_countries_bucket: %s" % embargoed_countries_bucket)
    log.debug("embargoed_countries_key: %s" % embargoed_countries_key)
    log.debug("countries_parser_arn: %s" % countries_parser_arn)
    # Configure bucket event to call embargoed countries parser.
    # Filter on the file's base name (prefix) and extension (suffix).
    file_name = embargoed_countries_key.split('/')[-1]
    file_name_parts = file_name.rsplit('.', 1)
    notification_conf = {'LambdaFunctionConfigurations': [{
        'Id': 'Call embargoed countries parser',
        'LambdaFunctionArn': countries_parser_arn,
        'Events': ['s3:ObjectCreated:*'],
        'Filter': {'Key': {'FilterRules': [
            {'Name': 'prefix', 'Value': file_name_parts[0]},
            {'Name': 'suffix', 'Value': file_name_parts[1]}
        ]}}
    }]}
    s3_client = boto3.client('s3')
    s3_client.put_bucket_notification_configuration(
        Bucket=embargoed_countries_bucket,
        NotificationConfiguration=notification_conf)
    # Upload embargoed-countries.json via a local temp copy.
    local_file_path = '/tmp/%s' % file_name
    prefix = 'https://s3.amazonaws.com/' + oring_bucket + '/'
    response = requests.head(prefix + embargoed_countries_key)
    # Switch to the regional endpoint when the source bucket is not in
    # us-east-1 (the global endpoint would redirect otherwise).
    if 'x-amz-bucket-region' in response.headers and response.headers['x-amz-bucket-region'] != 'us-east-1':
        prefix = prefix.replace('https://s3', 'https://s3-' + response.headers['x-amz-bucket-region'])
    response = requests.get(prefix + embargoed_countries_key)
    # Context manager ensures the handle is closed (and the content
    # flushed) before upload_file reads it — the original leaked it.
    with open(local_file_path, 'wb') as local_file:
        local_file.write(response.content)
    s3_client.upload_file(local_file_path, embargoed_countries_bucket, file_name)
    log.debug("configure_embargoed_countries_bucket - End")
def check_site(url, metric):
    """HEAD-probe http://<url> and record failures to the given metric.

    Returns the HTTP status code on success, or 100 when the request
    raised, in which case 100 is also written via write_metric.
    """
    STAT = 1
    print("Checking %s " % url)
    try:
        response = requests.head("http://" + url)
        response.close()
    except (requests.exceptions.URLRequired,
            requests.exceptions.HTTPError) as e:
        # NOTE(review): requests exceptions generally expose neither
        # .code nor .reason; report whatever detail exists, else the
        # exception itself.  The original left STAT at 1 when both
        # attributes were missing and then crashed on the unbound
        # `response` below — always mark the failure here instead.
        detail = getattr(e, 'code', None) or getattr(e, 'reason', None) or e
        print("[Error:] Connection to %s failed with code: " % url + str(detail))
        STAT = 100
        write_metric(STAT, metric)
        if isinstance(e, requests.exceptions.HTTPError):
            print('HTTPError!!!')
    if STAT != 100:
        STAT = response.status_code
        print(STAT)
    return STAT
# NOTE(review): botocore.vendored.requests is deprecated and removed in
# newer botocore releases — confirm the runtime pins an old botocore.
from botocore.vendored import requests
from botocore.exceptions import ClientError
import boto3

# Present the distribution's canonical Host header while connecting to a
# specific edge-location hostname (ATL50) — presumably a per-edge
# availability probe of the CloudFront distribution; verify intent.
headers = {'host': 'd138jiqnlw2og4.cloudfront.net'}
response = requests.head('http://d138jiqnlw2og4.ATL50.cloudfront.net/test.txt', headers=headers)
print(response)