class S3Driver(Driver):
    """Storage driver backed by an S3-compatible object store (boto3)."""

    def _setup(self, driver_conf):
        """Build the boto3 S3 resource and ensure the driver bucket exists.

        ``driver_conf`` must expose ``access_key``, ``secret_key`` and
        ``endpoint``.  ``self._bucket`` is presumably provided by the base
        ``Driver`` class — TODO confirm.
        """
        import logging  # function-scope: keeps the file's import header untouched

        self._client = Session(
            aws_access_key_id=driver_conf.access_key,
            aws_secret_access_key=driver_conf.secret_key,
        ).resource('s3', endpoint_url=driver_conf.endpoint)
        try:
            self._client.Bucket(self._bucket).create()
        except Exception:
            # Best-effort: the bucket usually already exists.  Log the reason
            # instead of silently swallowing it so genuine auth/endpoint
            # problems remain visible in the driver's logs.
            logging.getLogger(__name__).debug(
                'Bucket %r creation skipped', self._bucket, exc_info=True)

    def _put(self, bucket, key, data):
        """Upload the file-like object ``data`` to ``bucket``/``key``.

        Always returns ``True`` (boto3 raises on failure).
        """
        self._client.Object(bucket, key).upload_fileobj(data)
        return True

    def _get(self, bucket, key, output):
        """Download ``bucket``/``key`` into the file-like object ``output``.

        Always returns ``True`` (boto3 raises on failure).
        """
        self._client.Object(bucket, key).download_fileobj(output)
        return True
def main():
    """Upload versioned amplitude JS artifacts to S3, refusing overwrites.

    Requires ``--version``; reads AWS credentials and the target bucket
    from the environment.  Exits via ``sys.exit`` if any target object
    already exists; returns 0 on success.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--version', '-v', required=True, help='Version to deploy')
    args = parser.parse_args()

    # Hoisted: the bucket name was previously re-read from the environment
    # on every existence check.
    bucket_name = os.environ.get('S3_BUCKET_NAME')
    s3 = Session(
        aws_access_key_id=os.environ.get('AWS_ACCESS_KEY_ID'),
        aws_secret_access_key=os.environ.get('AWS_SECRET_ACCESS_KEY'),
        region_name=os.environ.get('AWS_REGION'),
    ).resource('s3')
    bucket = s3.Bucket(bucket_name)

    files = [
        f'amplitude-{args.version}.js',
        f'amplitude-{args.version}-min.js',
        f'amplitude-{args.version}.umd.js',
        f'amplitude-{args.version}-min.umd.js',
    ]
    for file in files:
        # Plain builds live under libs/ — a published version is immutable.
        if check_exists(s3.Object(bucket_name, os.path.join('libs', file))):
            sys.exit(f'ERROR: {file} already exists and shouldn\'t be republished. Consider releasing a new version')
        print(f'Uploading {file}')
        upload(bucket, file, unzipped_args)

    gz_files = [
        f'amplitude-{args.version}-min.gz.js',
        f'amplitude-{args.version}-min.umd.gz.js',
    ]
    for file in gz_files:
        # NOTE(review): gzip builds are checked at the bucket root (no libs/
        # prefix) unlike the plain builds above — confirm this asymmetry is
        # intentional.
        if check_exists(s3.Object(bucket_name, file)):
            sys.exit(f'{file} already exists!')
        print(f'Uploading {file}')
        upload(bucket, file, zipped_args)

    print(f'Success: S3 upload completed. Example: https://cdn.amplitude.com/libs/amplitude-{args.version}.js')
    return 0
def lambda_handler(event, context):
    """Ingest a 'post' event: write its JSON to S3, index it into
    Elasticsearch ('posts' and 'events'), and update user tracking.

    Returns the response dict built by ``esh.return_code`` (500 on an
    invalid event, 404 on missing fields, 400 on indexing failure,
    200 on success).
    """
    s3 = Session().resource('s3')
    esh = ESHelper(event)

    try:
        esh.validate_event()
    except AssertionError as err:
        return esh.return_code(message=err, code=500)

    post = esh.event()['body']['elements'].get('post')
    properties = (post or {}).get('properties') or {}
    if not properties.get('userId') or not properties.get('location'):
        return esh.return_code(message='Missing Information', code=404)
    user_id = properties['userId']
    location = properties['location']

    post_ids = _group_by_post_id(
        post.get('video'), post.get('image'), post.get('text'))

    for pId, post_element in post_ids.items():
        s3object = s3.Object(
            S3BUCKET,
            'public' + '/' + pId + '/' + 'json' + '/' + pId + '.json')
        try:
            s3object.put(Body=(bytes(json.dumps(post_element).encode('utf8'))))
        except Exception as err:
            # Best-effort: indexing proceeds even if the S3 write failed.
            print(err)
            print('Could not put json into s3')
        postgen = TemplatePost(esh=esh, post_id=pId, post=post_element,
                               user_id=user_id, location=location)
        try:
            esh.es().create(index='posts', id=pId, doc_type='_doc',
                            body=postgen.gen_create_post())
        except Exception as err:
            print(err)
            return esh.return_code(code=400, message='Could not post item')
        try:
            esh.es().index(index='events', doc_type='_doc',
                           body=postgen.gen_create_event_location())
        except Exception as err:
            print(err)
            return esh.return_code(code=400, message='Could not post item')

    eshtracking = UtilitiesTracking(esh=esh, user_id=user_id,
                                    location=location, post=True,
                                    active=True, engagement=True)
    try:
        eshtracking.update_tracking()
    except AssertionError as err:
        # NOTE(review): returns None here (Lambda treats that as an empty
        # response) — confirm an explicit error response isn't wanted.
        print(err)
        return
    # BUG FIX: the success response was previously computed but never
    # returned, so the handler always returned None on success.
    return esh.return_code(code=200, message='OK!')


def _group_by_post_id(videos, images, texts):
    """Group post elements by their parent postId.

    Elements carrying a ``childId`` are attached under their parent's
    ``'children'`` list; elements with only a ``postId`` become parents.

    Two-pass so that a child appearing before its parent is still
    attached (the original single-pass version raised AttributeError in
    that case); children whose parent never appears are dropped.
    """
    parents = {}
    pending_children = {}
    for elements in (videos, images, texts):
        for element in elements or []:
            pId = element.get('postId')
            cId = element.get('childId')
            if cId:
                pending_children.setdefault(pId, []).append(element)
            elif pId:
                parents[pId] = element
    for pId, children in pending_children.items():
        parent = parents.get(pId)
        if parent is not None:
            parent.setdefault('children', []).extend(children)
    return parents