def lambda_handler(event, context):
    """Download the NYT and Johns Hopkins COVID-19 CSV files, upload them
    to S3, and publish an SNS message to kick off the downstream ETL.

    On any failure, publishes the error to ERROR_TOPIC instead of raising,
    so the caller always gets a 200-style response dict.
    """
    try:
        # Source URLs and destinations all come from the environment.
        NYT_URL = os.environ['New_York_Times_COVID19_Data_URL']
        JH_URL = os.environ['Johns_Hopkins_COVID19_Data_URL']
        BUCKET = os.environ['BUCKET']
        TOPIC = os.environ['TOPIC']
        ERROR_TOPIC = os.environ['ERROR_TOPIC']

        datetimestamp = datetime.datetime.today().strftime('%Y%m%dT%H%M%S')

        # The two downloads were identical code paths — use one helper.
        nyt_key = _fetch_to_s3(
            NYT_URL, BUCKET,
            'NYT/' + "nyt_covid19_" + datetimestamp + ".csv", "NYT File")
        jh_key = _fetch_to_s3(
            JH_URL, BUCKET,
            'JH/' + "jh_covid19_" + datetimestamp + ".csv", "JH File")

        response = sns.publish(
            TopicArn=TOPIC,
            Message=json.dumps({'NYT': nyt_key, 'JH': jh_key}),
        )
        print(response)
        msg = ("Files loaded to S3 successfully. SNS Notification sent to "
               "kick start ETL function. ID: {}".format(response["MessageId"]))
        return {'statusCode': 200, 'body': json.dumps(msg)}
    except Exception as e:
        # Boundary handler: report the failure over SNS rather than raising.
        print("Error occurred during execution.")
        msg = "File download lambda function failed. Error msg : {}".format(e)
        response = sns.publish(
            TopicArn=ERROR_TOPIC,
            Message=json.dumps(msg),
        )
        msg = "Files are not loaded into S3. SNS notification sent. ID: {}".format(
            response["MessageId"])
        return {'statusCode': 200, 'body': json.dumps(msg)}


def _fetch_to_s3(url, bucket, key, label):
    """Stream `url` into s3://`bucket`/`key`.

    Raises Exception prefixed with `label` on a non-200 response.
    Returns the S3 key on success.
    """
    http = urllib3.PoolManager()
    r = http.request('GET', url, preload_content=False)
    if r.status != 200:
        raise Exception("{}: {}".format(label, r.data.decode('utf-8')))
    s3.upload_fileobj(r, bucket, key)
    return key
def get_link_to_manifest(GITHUB_TOKEN, full_name_head_repo, branch_head_repo, pr_module):
    """Return the raw download URL of an Odoo addon manifest in a repo.

    Lists the contents of `pr_module` on `branch_head_repo` of
    `full_name_head_repo` via the GitHub contents API and returns the
    `download_url` of `__manifest__.py` / `__openerp__.py`, or None if
    no manifest file is present.
    """
    logger.debug("Full name head repo: %s", full_name_head_repo)
    logger.debug("Branch head repo: %s", branch_head_repo)
    # GET /repos/:owner/:repo/contents/:path
    url = 'https://api.github.com/repos/%s/contents/%s?ref=%s#' % (
        full_name_head_repo, pr_module, branch_head_repo)
    http = urllib3.PoolManager()
    res = http.request(
        'GET', url,
        headers={
            'Accept': 'application/vnd.github.v3.raw',
            'User-Agent': 'https://gitlab.com/itpp/odoo-devops/blob/master/docs/git/github-review-bot.rst',
            'Authorization': 'token %s' % GITHUB_TOKEN
        })
    list_files = json.loads(res.data)
    logger.debug("list_files: \n%s", list_files)
    for entry in list_files:
        # The API may return error payloads that are not file dicts.
        if not isinstance(entry, dict):
            continue
        if entry.get('name') in ('__manifest__.py', '__openerp__.py'):
            return entry.get('download_url')
    return None  # no manifest found — explicit for callers that test truthiness
def lambda_handler(event, context):
    """Download the best progressive MP4 for the YouTube video id given in
    the query string and upload it to the seektube bucket; return metadata.

    SECURITY: the original embedded long-term AWS access keys directly in
    source. They have been removed (and MUST be rotated, since they were
    committed). boto3 now picks up credentials from the Lambda execution
    role / environment, which is the supported mechanism.
    """
    import os  # local import: credentials/config come from the environment

    BUCKET_NAME = os.environ.get('BUCKET_NAME', 'seektube')
    yt_id = event["queryStringParameters"]['id']
    yt_url = "https://www.youtube.com/watch?v=" + yt_id
    yt = YouTube(yt_url)
    # Progressive streams bundle audio+video; the last filter result is the
    # highest resolution.
    stream = yt.streams.filter(progressive=True, file_extension="mp4").all()[-1]
    key = yt_id + '.mp4'
    s3 = boto3.client("s3")
    http = urllib3.PoolManager()
    s3.upload_fileobj(http.request("GET", stream.url, preload_content=False),
                      BUCKET_NAME, key)
    message = {
        "id": yt_id,
        "title": yt.title,
        "resolution": stream.resolution,
        "mime_type": stream.mime_type
    }
    return {
        "statusCode": 200,
        "body": json.dumps(message),
        "headers": {
            "Access-Control-Allow-Origin": "*",
            "Content-Type": "application/json"
        }
    }
def handler(event, context):
    """Fetch the highest-quality progressive MP4 for the requested YouTube
    video, stream it into the quickseek bucket, and return its metadata."""
    video_id = event["pathParameters"]["video-id"]
    video = YouTube("https://www.youtube.com/watch?v=" + video_id)
    # Progressive streams include audio; the last match is the best quality.
    best = video.streams.filter(progressive=True, file_extension="mp4").all()[-1]
    pool = urllib3.PoolManager()
    source = pool.request("GET", best.url, preload_content=False)
    s3.upload_fileobj(source, "quickseek", video_id + ".mp4")
    return {
        "statusCode": 200,
        "body": json.dumps({
            "id": video_id,
            "title": video.title,
            "resolution": best.resolution,
            "mime_type": best.mime_type,
        }),
        "headers": {
            "Access-Control-Allow-Origin": "*",
            "Content-Type": "application/json",
        },
    }
def lambda_handler(event, context):
    """Fetch sample.zip from an FTP server and upload every member of the
    archive to S3 under <zip-name>/<member>, with KMS server-side encryption.

    Fixes: the original built an s3fs.S3FileSystem and then immediately
    rebound `s3` to a boto3 resource (the s3fs object was never used), and
    created a urllib3 PoolManager that was never used either — both removed.
    """
    ftpURL = 'localhost'
    ftpPath = '/test_folder/'
    s3Bucket = 's3-bucket'
    folderName = ''
    filename = 'sample.zip'
    with FTP(ftpURL) as ftp:
        # Anonymous login, then change into the source directory.
        ftp.login()
        ftp.cwd(ftpPath)
        s3 = boto3.resource('s3')
        if filename in ftp.nlst():
            ftps_url = 'ftp://' + ftpURL + ftpPath + filename
            # Target prefix is the archive name without its extension.
            folderName = folderName + filename.rsplit('.', 1)[0]
            # The whole zip is read into memory; fine for small archives.
            with ZipFile(BytesIO(
                    (urllib.request.urlopen(ftps_url)).read())) as my_zip_file:
                for contained_file in my_zip_file.namelist():
                    s3.meta.client.upload_fileobj(
                        my_zip_file.open(contained_file),
                        s3Bucket,
                        folderName + '/' + contained_file,
                        ExtraArgs={
                            'ServerSideEncryption': 'aws:kms',
                            'SSEKMSKeyId': 'alias/<alias_name>'
                        })
def lambda_handler(event, context):
    """SQS-triggered: for each message (up to 10 per batch), download the
    PDF at its URL attribute and store it in the apify-wri-pdfs bucket
    under <country>/<state>/<basename>.

    A failure on one message is logged and skipped so the rest of the
    batch still processes (the original used a bare `except:` that
    silently swallowed everything, including SystemExit).
    """
    s3 = boto3.client('s3')
    # One TLS-verifying pool for the whole batch (was rebuilt per message).
    http = urllib3.PoolManager(cert_reqs='CERT_REQUIRED')
    records = event['Records']
    for item in records:
        # Extract routing information from the message attributes.
        country = item['messageAttributes']['country']['stringValue']
        state = item['messageAttributes']['state']['stringValue']
        URL = item['messageAttributes']['URL']['stringValue']
        name = URL.split('/')[-1]
        bucket = 'apify-wri-pdfs'
        key = country + "/" + state + "/" + name
        try:
            s3.upload_fileobj(http.request('GET', URL, preload_content=False),
                              bucket, key)
        except Exception as e:
            # Best-effort per message: log and continue with the next one.
            print("Failed to upload {}: {}".format(URL, e))
            continue
    return {'Status': '200'}
def handle(event, context):
    """SNS-triggered health check: GET the TargetUrl message attribute and
    alert through send_message() when the response is not a success."""
    attributes = event['Records'][0]['Sns']['MessageAttributes']
    url = attributes['TargetUrl']['Value']
    if not url:
        exit(f"Failed to check connection... No url provided.")
    response = urllib3.PoolManager().request('GET', url)
    # Anything outside 2xx (redirects included) counts as "down".
    if response.status >= 300:
        send_message(url)
def fetch_video(link):
    """Download the best progressive MP4 for a YouTube watch URL and store
    it in the configured bucket as <video-id>.mp4."""
    video_id = link.split('=')[-1]
    video = YouTube(link)
    # Last progressive (audio+video) MP4 stream is the highest quality.
    best_stream = video.streams.filter(progressive=True, file_extension="mp4").all()[-1]
    pool = urllib3.PoolManager()
    body = pool.request('GET', best_stream.url, preload_content=False)
    s3.upload_fileobj(body, utils.BUCKET, video_id + '.mp4')
def get_pull_info(pulls_url, pull):
    """Fetch a pull request's JSON from the GitHub API and return it as a dict."""
    url = pulls_url.replace('{/number}', pull)
    request_headers = {
        'User-Agent': 'aws lambda handler',
        'Authorization': 'token %s' % GITHUB_TOKEN,
    }
    raw = urllib3.PoolManager().request('GET', url, headers=request_headers)
    info = json.loads(raw.data)
    logger.debug("Pull info via %s: \n%s", url, json.dumps(info))
    return info
def lambda_handler(event, context):
    """Stream a fixed OVA image from vulnhub into an S3 object."""
    source_url = 'https://download.vulnhub.com/node/Node.ova'  # put your url here
    target_bucket = 'bucketID'  # your s3 bucket
    target_key = 'node.ova'  # your desired s3 path or filename
    client = boto3.client('s3')
    # preload_content=False keeps the body streaming instead of buffering it.
    body = urllib3.PoolManager().request('GET', source_url, preload_content=False)
    client.upload_fileobj(body, target_bucket, target_key)
def notify_ifttt(hook, **data):
    """POST `data` as a JSON body to an IFTTT webhook URL.

    Returns the raw urllib3 response object.
    """
    logger.debug("notify_ifttt: %s", data)
    request_headers = {
        'Content-Type': 'application/json',
        'User-Agent': 'aws lambda handler',
    }
    return urllib3.PoolManager().request(
        'POST', hook, body=json.dumps(data), headers=request_headers)
def uploadS3(self):
    """Download the first MMS media attachment for this message and store
    it in the sns-pictures bucket; return (bucket, key)."""
    client = boto3.client('s3')
    url = self.message['MediaUrl0']
    pool = urllib3.PoolManager()
    bucket = 'sns-pictures'  # your s3 bucket
    # Key layout: <to-number>-<from-number>/<media-id>.jpg, '+' stripped.
    key = '{0}-{1}/{2}.jpg'.format(
        self.message['To'].replace('+', ''),
        self.number.replace('+', ''),
        url.split('/')[-1])
    client.upload_fileobj(
        pool.request('GET', url, preload_content=False), bucket, key)
    return bucket, key
def send_message(url):
    """Report a downed server to Telegram through the chatbot service.

    Exits the process with a diagnostic if the chatbot call itself fails.
    """
    endpoint = os.environ['CHATBOT_URL'] + '/sendMessage'
    payload = {
        'chat_id': os.environ['CHAT_ID'],
        'text': f"[Akaito] Server is down!\n{url}",
        'parse_mode': 'HTML'
    }
    response = urllib3.PoolManager().request('POST', endpoint, fields=payload)
    if response.status >= 400:
        exit(f"Failed to send message via telegram... ({response.status} {response.reason})")
def get_status_check_run(owner_base, repo_head, sha_head):
    """Return the GitHub check-runs status JSON for a commit.

    Uses the antiope-preview media type required by the check-runs API.
    """
    # GET /repos/:owner/:repo/commits/:ref/check-runs
    url = 'https://api.github.com/repos/%s/%s/commits/%s/check-runs' % (
        owner_base, repo_head, sha_head)
    request_headers = {
        'User-Agent': 'aws lambda handler',
        'Accept': 'application/vnd.github.antiope-preview+json',
        'Authorization': 'token %s' % GITHUB_TOKEN,
    }
    raw = urllib3.PoolManager().request('GET', url, headers=request_headers)
    result = json.loads(raw.data)
    logger.debug("Status of Check runs: \n%s", json.dumps(result))
    return result
def uploadS3(imageUrl, fromNumber):
    """Download an image and store it under <folder>/<from>-<name>.jpg.

    Returns (imageObject, bucket, key); imageObject is the (streamed)
    urllib3 response that was uploaded.
    """
    logger.debug(
        ' ..s3_image_service#uploadS3: imageUrl={}, fromNumber={}'.format(
            str(imageUrl), fromNumber))
    imageName = imageUrl.split('/')[-1]
    key = '{0}/{1}-{2}.jpg'.format(folder, fromNumber, imageName)
    logger.info(' ..bucket={}, key={}'.format(bucket, key))
    pool = urllib3.PoolManager()
    imageObject = pool.request('GET', imageUrl, preload_content=False)
    s3.upload_fileobj(imageObject, bucket, key)
    return imageObject, bucket, key
def do_slack_message(results, path):
    """Build (and, when config.use_slack, POST) the Slack notification for
    a finished test run.

    `results` carries .failures and .errors — lists whose entries' first
    element exposes a `test_id` (unittest-style (test, traceback) pairs,
    presumably — confirm against run_html_tests). `path` is the S3 key of
    the uploaded HTML report. Returns the JSON body that was built.

    Fix: replaced the lambda-assigned-to-a-name (PEP 8 / E731) with a
    small named function; dropped the unused response binding.
    """
    root = "https://s3.{}.amazonaws.com/{}/".format(config.out_region,
                                                    config.out_bucket_name)
    url = root + path.replace(" ", "+")
    all_good = len(results.failures) == 0 and len(results.errors) == 0
    if all_good:
        msg = "All tests have finished successfully."
        color = "good"
        errs = ""
        thumb = root + "success.png"
    else:
        msg = "There are failing tests!"
        color = "danger"

        def describe(entry):
            # First element of each failure/error entry names the test.
            return "Failed: " + entry[0].test_id

        errs = "\n".join(
            list(map(describe, results.failures)) +
            list(map(describe, results.errors)))
        thumb = root + "failed.png"
    body_data = json.dumps({
        "text": msg,
        "attachments": [{
            "color": color,
            "fallback": "View the test report at " + url,
            "thumb_url": thumb,
            "text": errs,
            "footer": "Jericho Automated Tests by LGSS Digital",
            "ts": int(time.mktime(datetime.datetime.now().timetuple())),
            "actions": [{
                "type": "button",
                "text": "View test report",
                "url": url
            }]
        }]
    })
    if config.use_slack:
        http = urllib3.PoolManager()
        http.request("POST", config.slack_endpoint,
                     headers={'Content-Type': 'application/json'},
                     body=body_data)
    return body_data
def test_connections(wait):
    """Poll dimpsey_url until it answers with a non-error status.

    Sleeps `wait` seconds, then GETs the URL. On a <400 response returns
    True; on a >=400 response or a connection error it retries recursively
    with wait+1, giving up (False) once wait exceeds 5.

    Fix: the bare `except:` (which would also swallow SystemExit and
    KeyboardInterrupt) is narrowed to `except Exception`.
    """
    time.sleep(wait)
    http = urllib3.PoolManager()
    try:
        r = http.request('GET', dimpsey_url)
        if r.status >= 400:
            if wait > 5:
                return False
        else:
            return True
    except Exception:
        if wait > 5:
            return False
    # Error or bad status with retries remaining: back off and try again.
    return test_connections(wait + 1)
def lambda_handler(event, context):
    """Look up the latest frida release tag (via url1), download the
    matching android-arm frida-server archive (from url2), and upload it
    to the configured S3 bucket under its release filename.

    Fixes: removed the unused `path` local (built from a bogus literal)
    and its commented-out log line; deduplicated the User-Agent literal;
    dropped redundant parentheses around the filename expression.
    """
    # Some endpoints reject requests without a browser-like User-Agent.
    user_agent = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.47 Safari/537.36'
    urllib3.disable_warnings()
    http = urllib3.PoolManager()
    response = http.request('GET', url1, preload_content=False,
                            headers={'User-Agent': user_agent})
    data = json.loads(response.data.decode('utf-8'))
    tag = data['tag_name']
    location = url2 + tag + "/frida-server-" + tag + "-android-arm.xz"
    filename = "frida-server-" + tag + "-android-arm.xz"
    # Stream the archive straight through to S3 without buffering it all.
    fileobj = http.request('GET', location, preload_content=False,
                           headers={'User-Agent': user_agent})
    s3.upload_fileobj(fileobj, bucket, filename)
    return {
        "statusCode": 200,
        "body": json.dumps('Upload Successful')
    }
def lambda_handler(event, context):
    """Triggered by the AWS price-list update notification: download the
    current EC2 price list JSON and save it to the S3_DESTINATION bucket
    as Pricing-<dd.mm.yyyy>.json.

    Fix: "Fle" typo in the success log message.
    """
    now = datetime.datetime.now()
    timestamp = now.strftime("%d/%m/%Y - %H:%M:%S")
    print('Pricelist update notification received at ' + str(timestamp))
    datestamp = now.strftime("%d.%m.%Y")
    filename = "Pricing-" + datestamp + ".json"
    # Download the json file to S3.
    url = 'https://pricing.us-east-1.amazonaws.com/offers/v1.0/aws/AmazonEC2/current/index.json'
    # Verify TLS against the certifi CA bundle.
    http = urllib3.PoolManager(cert_reqs='CERT_REQUIRED', ca_certs=certifi.where())
    s3_bucket = os.environ.get("S3_DESTINATION")
    s3_client = boto3.client('s3')
    s3_client.upload_fileobj(http.request('GET', url, preload_content=False),
                             s3_bucket, filename)
    print("File successfully saved to S3")
    return "Latest EC2 Pricelist successfully saved to S3"
def lambda_handler(event, context):
    """Download a file from a URL and upload it to S3 with KMS server-side
    encryption, keying the object by the URL's basename.

    Fix: the original also called urllib.request.urlopen(url) and threw
    the result away — downloading the whole file a second time for
    nothing. That call is removed; the urllib3 streaming request is the
    one actually uploaded.
    """
    s3Bucket = 's3-bucket'  # provide s3 bucket name
    url = '<internet_url>'  # provide internet url to download the file
    s3 = boto3.resource('s3')
    http = urllib3.PoolManager()
    file_name = url.rsplit('/', 1)[1]  # renamed: `file` shadowed a builtin
    s3.meta.client.upload_fileobj(
        http.request('GET', url, preload_content=False), s3Bucket, file_name,
        ExtraArgs={
            'ServerSideEncryption': 'aws:kms',
            'SSEKMSKeyId': 'alias/<alias_name>'
        })
    print('Download completed : ' + file_name)
def update_review(GITHUB_TOKEN, full_name, pull_number, id_review, review_body):
    """Replace the body of an existing pull-request review via the GitHub
    API and return the decoded response."""
    # PUT /repos/:owner/:repo/pulls/:pull_number/reviews/:review_id
    url = 'https://api.github.com/repos/%s/pulls/%s/reviews/%s' % (
        full_name, pull_number, id_review)
    payload = json.dumps({'body': review_body})
    request_headers = {
        'Content-Type': 'application/vnd.github.v3.raw+json',
        'User-Agent': 'https://gitlab.com/itpp/odoo-devops/blob/master/docs/git/github-review-bot.rst',
        'Authorization': 'token %s' % GITHUB_TOKEN,
    }
    raw = urllib3.PoolManager().request(
        'PUT', url, headers=request_headers, body=payload)
    result = json.loads(raw.data)
    logger.debug("Update review pull request: \n%s", json.dumps(result))
    return result
def lambda_handler(event, context):
    """Fetch a test-suite bundle (from a URL or an S3 object), unpack it
    into config.tests_root, run the HTML tests, save the log, announce the
    outcome on Slack, and return success/failure/error counts."""
    res_dst = os.path.join(config.working_dir, "resource.zip")
    print(event)
    resource = event["resource"]
    resource_type = resource["type"]
    if resource_type == "url":
        http = urllib3.PoolManager()
        with http.request('GET', resource["location"],
                          preload_content=False) as resp, \
                open(res_dst, 'wb') as out_file:
            shutil.copyfileobj(resp, out_file)
    elif resource_type == "s3":
        res = resource
        # Per-request credentials override the configured defaults.
        key_id = res['key_id'] if 'key_id' in res else config.src_bucket_key_id
        secret = res['key'] if 'key' in res else config.src_bucket_key
        s3 = boto3.resource('s3', aws_access_key_id=key_id,
                            aws_secret_access_key=secret)
        s3.Bucket(res['bucket']).download_file(res['file_key'], res_dst)
    else:
        raise Exception("Invalid resource type")
    # Start from a clean tests directory before extracting the bundle.
    if os.path.isdir(config.tests_root):
        shutil.rmtree(config.tests_root)
    zipfile.ZipFile(res_dst).extractall(path=config.tests_root)
    test_bootstrap.setenv(event["environment"])
    tests_result = run_html_tests()
    report_path = save_test_log()
    do_slack_message(tests_result, report_path)
    return {
        "succeeded": len(tests_result.successes),
        "failed": len(tests_result.failures),
        "errors": len(tests_result.errors),
    }
# SPDX-License-Identifier: Apache-2.0 """ Purpose Shows how to create a fully serverless REST API for Rekognition Content Moderation Solution with URL Support. """ import json import boto3 import botocore.vendored.requests.packages.urllib3 as urllib3 import io client = boto3.client('rekognition') manager = urllib3.PoolManager() def getModerationForUrl(url): try: extensions = ['jpg', 'jpeg', 'png'] if not any(url.lower().endswith(ext) for ext in extensions): return 400, "Amazon Rekognition supports only the following image formats: jpg, jpeg, png" response = manager.request('GET', url, preload_content=False) if response.status == 404: return 404, "Image not found" try: reader = io.BufferedReader(response, 8)
GRAFANA_CONFIG_TEMPLATE = ''' [server] domain = {domain} root_url = %(protocol)s://%(domain)s:/{stage}/grafana [paths] data = {data} logs = /tmp/grafana/logs plugins = {plugins} '''.lstrip() GRAFANA_PIDFILE = '/tmp/grafana.pid' GRAFANA_PROCESS = None # Use retries when proxying requests to the Grafana process, # because it can take a moment for it to start listening. http = urllib3.PoolManager() retry_settings = urllib3.Retry( connect=20, backoff_factor=0.1, ) dynamodb = boto3.client('dynamodb') s3 = boto3.client('s3') @contextmanager def dynamodb_lock(context): """ Lock the data so that only 1 Lambda function can read/write at a time. """
def fetch():
    """GET dimpsey_url and mirror the response body into the public S3
    object at (bucket, key); return the raw bytes."""
    pool = urllib3.PoolManager()
    response = pool.request('GET', dimpsey_url)
    s3.put_object(
        ACL='public-read',
        Body=response.data,
        Bucket=bucket,
        Key=key,
    )
    return response.data