def test_encode_body_safe_unicode_to_other_bytes(self):
    """A unicode body that encodes cleanly in the declared non-utf-8
    charset is encoded with that charset; content-type is untouched."""
    self.req.headers['content-type'] = 'text/plain; charset=ascii'
    self.req.body = 'hello'
    AWS4Auth.encode_body(self.req)
    # b'hello' == b'\x68\x65\x6c\x6c\x6f'
    self.assertEqual(self.req.body, b'hello')
    self.assertEqual(self.req.headers['content-type'],
                     'text/plain; charset=ascii')
 def test_encode_body_unsafe_unicode_to_other_bytes(self):
     """A body needing a non-utf-8 codec (cp1252 here) is encoded with the
     declared charset; the content-type header is preserved."""
     self.req.headers['content-type'] = 'text/plain; charset=cp1252'
     self.req.body = '€'
     AWS4Auth.encode_body(self.req)
     # '€' is 0x80 in cp1252
     self.assertEqual(self.req.body, b'\x80')
     self.assertEqual(self.req.headers['content-type'],
                      'text/plain; charset=cp1252')
    def test_generate_signature(self):
        """
        Verify the final signature against the worked example at
        http://docs.aws.amazon.com/general/latest/gr/sigv4-calculate-signature.html

        """
        signing_key = AWS4SigningKey(
            'wJalrXUtnFEMI/K7MDENG+bPxRfiCYEXAMPLEKEY',
            'us-east-1',
            'iam',
            '20110909')
        lines = (
            'POST https://iam.amazonaws.com/ HTTP/1.1',
            'Host: iam.amazonaws.com',
            'Content-Type: application/x-www-form-urlencoded; charset=utf-8',
            'X-Amz-Date: 20110909T233600Z',
            '',
            'Action=ListUsers&Version=2010-05-08')
        req = request_from_text('\n'.join(lines) + '\n')
        del req.headers['content-length']
        # Sign every remaining header (captured before the hash header is added).
        auth = AWS4Auth('dummy', signing_key, include_hdrs=list(req.headers))
        AWS4Auth.encode_body(req)
        req.headers['x-amz-content-sha256'] = hashlib.sha256(
            req.body).hexdigest()
        signed = auth(req)
        # The signature is the value after the third '=' in the header.
        sig = signed.headers['Authorization'].split('=')[3]
        self.assertEqual(
            sig,
            'ced6826de92d2bdeed8f846f0bf508e8559e98e4b0199114b84c54174deb456c')
 def _test_amz_test_suite_item(self, group_name, group):
     """Check canonical-request generation for one AWS4 test-suite group."""
     request = request_from_text(group['.req'])
     # Drop content-length if present; it must not be signed.
     request.headers.pop('content-length', None)
     signing_hdrs = list(request.headers)
     AWS4Auth.encode_body(request)
     digest = hashlib.sha256(request.body or b'').hexdigest()
     request.headers['x-amz-content-sha256'] = digest
     cano_headers, signed_headers = AWS4Auth.get_canonical_headers(
         request, signing_hdrs)
     cano_req = AWS4Auth.get_canonical_request(request, cano_headers,
                                               signed_headers)
     self.assertEqual(cano_req, group['.creq'], msg='Group: ' + group_name)
    def test_headers_amz_example(self):
        """
        Canonical and signed header strings for the example at
        http://docs.aws.amazon.com/general/latest/gr/sigv4-create-canonical-request.html

        """
        raw_headers = [
            'host:iam.amazonaws.com',
            'Content-type:application/x-www-form-urlencoded; charset=utf-8',
            'My-header1:    a   b   c ',
            'x-amz-date:20120228T030031Z',
            'My-Header2:    "a   b   c"']
        header_map = {name: value
                      for name, value in (h.split(':') for h in raw_headers)}
        prepared = requests.Request('GET',
                                    'http://iam.amazonaws.com',
                                    headers=header_map).prepare()
        cano_headers, signed_headers = AWS4Auth.get_canonical_headers(
            prepared, include=list(prepared.headers))
        want_cano = '\n'.join([
            'content-type:application/x-www-form-urlencoded; charset=utf-8',
            'host:iam.amazonaws.com',
            'my-header1:a b c',
            'my-header2:"a   b   c"',
            'x-amz-date:20120228T030031Z']) + '\n'
        self.assertEqual(cano_headers, want_cano)
        self.assertEqual(signed_headers,
                         'content-type;host;my-header1;my-header2;x-amz-date')
 def _test_amz_test_suite_item(self, group_name):
     """Check the full Authorization header for one AWS4 test-suite group."""
     group = amz_aws4_testsuite.data[group_name]
     request = request_from_text(group['.req'])
     # content-length must not take part in signing.
     request.headers.pop('content-length', None)
     signing_hdrs = list(request.headers)
     AWS4Auth.encode_body(request)
     request.headers['x-amz-content-sha256'] = hashlib.sha256(
         request.body or b'').hexdigest()
     request.headers['x-amz-date'] = amz_aws4_testsuite.timestamp
     signing_key = AWS4SigningKey(amz_aws4_testsuite.access_key,
                                  amz_aws4_testsuite.region,
                                  amz_aws4_testsuite.service,
                                  amz_aws4_testsuite.date)
     auth = AWS4Auth(amz_aws4_testsuite.access_id, signing_key,
                     include_hdrs=signing_hdrs)
     signed = auth(request)
     self.assertEqual(signed.headers['Authorization'], group['.authz'],
                      msg='Group: ' + group_name)
    def test_netloc_port(self):
        """
        Regression test for d190dcb: the port must be stripped from the
        netloc before signing when no Host header is already present.

        """
        prepared = requests.Request('GET',
                                    'http://amazonaws.com:8443').prepare()
        self.assertNotIn('host', prepared.headers)
        cano_hdrs, signed_hdrs = AWS4Auth.get_canonical_headers(
            prepared, include=['host'])
        self.assertEqual(cano_hdrs, 'host:amazonaws.com\n')
    def test_amz1(self):
        """
        Canonical request for example data selected from:
        http://docs.aws.amazon.com/general/latest/gr/sigv4-create-canonical-request.html

        """
        raw = '\n'.join([
            'POST https://iam.amazonaws.com/ HTTP/1.1',
            'Host: iam.amazonaws.com',
            'Content-Length: 54',
            'Content-Type: application/x-www-form-urlencoded',
            'X-Amz-Date: 20110909T233600Z',
            '',
            'Action=ListUsers&Version=2010-05-08'])
        req = request_from_text(raw)
        AWS4Auth.encode_body(req)
        req.headers['x-amz-content-sha256'] = hashlib.sha256(
            req.body).hexdigest()
        cano_headers, signed_headers = AWS4Auth.get_canonical_headers(
            req, include=['host', 'content-type', 'x-amz-date'])
        expected = '\n'.join([
            'POST',
            '/',
            '',
            'content-type:application/x-www-form-urlencoded',
            'host:iam.amazonaws.com',
            'x-amz-date:20110909T233600Z',
            '',
            'content-type;host;x-amz-date',
            'b6359072c78d70ebee1e81adcbab4f01bf2c23245fa365ef83fe8f1f95'
            '5085e2'])
        auth = AWS4Auth('dummy', 'dummy', 'dummy', 'host')
        cano_req = auth.get_canonical_request(req, cano_headers,
                                              signed_headers)
        self.assertEqual(cano_req, expected)
    def test_duplicate_headers(self):
        """
        Headers whose names differ only by case must be merged into one
        canonical entry. A plain dict on a mock request stands in for the
        headers, since Requests' PreparedRequest dict is case-insensitive.

        """
        mock_req = SimpleNamespace()
        mock_req.headers = {'ZOO': 'zoobar',
                            'FOO': 'zoobar',
                            'zoo': 'foobar',
                            'Content-Type': 'text/plain',
                            'host': 'dummy'}
        wanted = [name for name in mock_req.headers
                  if name != 'Content-Type']
        cano_headers, signed_headers = AWS4Auth.get_canonical_headers(
            mock_req, include=wanted)
        self.assertEqual(cano_headers,
                         'foo:zoobar\nhost:dummy\nzoo:foobar,zoobar\n')
        self.assertEqual(signed_headers, 'foo;host;zoo')
Exemplo n.º 10
0
# Script: builds a SigV4-authenticated Elasticsearch client from credentials
# stored in a local INI file. NOTE(review): `import ConfigParser` is the
# Python 2 module name (configparser in Python 3) — this snippet is py2.
from elasticsearch import Elasticsearch, RequestsHttpConnection
from requests_aws4auth import AWS4Auth
import ConfigParser

# Read AWS credentials and endpoint details from credentials.txt.
config = ConfigParser.ConfigParser()
config.read('credentials.txt')

access_token = config.get("AWS", "access_token")
access_token_secret = config.get("AWS", "access_token_secret")
region = config.get("AWS", "region")
host = config.get("AWS", "host")

# SigV4 signer for the 'es' (Elasticsearch Service) endpoint.
awsauth = AWS4Auth(access_token, access_token_secret, region, 'es')
# HTTPS client with certificate verification enabled.
es = Elasticsearch(hosts=[{
    'host': host,
    'port': 443
}],
                   http_auth=awsauth,
                   use_ssl=True,
                   verify_certs=True,
                   connection_class=RequestsHttpConnection)


def elasticSearch(index, body=None):
    """Run a search against *index* using the module-level `es` client.

    `body` is the Elasticsearch query body; defaults to an empty query
    (match everything). Returns the raw search response dict.
    """
    # Fix: the original used `body={}` — a mutable default argument shared
    # across all calls. Default to None and substitute per call instead.
    result = es.search(index=index, body=body if body is not None else {})
    return result


def elasticInsert(index, doc_type, body):
    """Index *body* into *index* under *doc_type* via the module-level
    `es` client and return the raw indexing response."""
    return es.index(index=index, doc_type=doc_type, body=body)
# Script: secure (HTTPS + SigV4) connection to an AWS Elasticsearch domain
# using the default boto3 credential chain, plus a sample document.
from elasticsearch import Elasticsearch, RequestsHttpConnection
from requests_aws4auth import AWS4Auth
import boto3

host = 'search-ssm-esdomain-lbyrp7kpnj336o5igvlbow7kiu.eu-central-1.es.amazonaws.com' # For example, my-test-domain.us-east-1.es.amazonaws.com
region = 'eu-central-1' # e.g. us-west-1

service = 'es'

# Resolve credentials from the default boto3 chain (env/instance profile/...).
credentials = boto3.Session().get_credentials()
awsauth = AWS4Auth(credentials.access_key, credentials.secret_key, region, service)

# SECURITY NOTE(review): the two prints below write live AWS credentials to
# stdout (and hence to any captured logs). Remove before production use.
print(credentials.access_key)

print(credentials.secret_key)

es = Elasticsearch(
    hosts = [{'host': host, 'port': 443}],
    http_auth = awsauth,
    use_ssl = True,
    verify_certs = True,
    connection_class = RequestsHttpConnection
)

# Sample document for indexing (e.g. es.index(..., body=document)).
document = {
    "title": "Moneyball",
    "director": "Bennett Miller",
    "year": "2011"
}
 def test_encode_body_unsafe_unicode_to_utf8(self):
     """With no charset declared, a non-ASCII body is encoded as utf-8 and
     the content-type header is set to announce that."""
     self.req.body = '☃'
     AWS4Auth.encode_body(self.req)
     # utf-8 encoding of the snowman character
     self.assertEqual(self.req.body, b'\xe2\x98\x83')
     self.assertEqual(self.req.headers['content-type'],
                      'text/plain; charset=utf-8')
Exemplo n.º 13
0
# Lambda: curator-based Elasticsearch index cleanup (delete indices older
# than MAX_AGE_DAYS). NOTE(review): this excerpt is truncated — handler()
# breaks off right after building the IndexList.
import curator
from curator.exceptions import NoIndices
from elasticsearch import Elasticsearch, RequestsHttpConnection
from requests_aws4auth import AWS4Auth

logger = logging.getLogger()
logger.setLevel(logging.INFO)

# Runtime configuration from Lambda environment variables.
elasticsearch_host = os.environ['ES_HOST']
max_age_days = int(os.environ['MAX_AGE_DAYS'])
dry_run_only = os.environ['DRY_RUN_ONLY'] in ['true', 'True', 'yes']

# do auth because we are outside VPC
# SigV4 auth built from the Lambda execution credentials (incl. session token).
aws_auth = AWS4Auth(os.environ['AWS_ACCESS_KEY_ID'],
                    os.environ['AWS_SECRET_ACCESS_KEY'],
                    os.environ['AWS_REGION'],
                    'es',
                    session_token=os.environ['AWS_SESSION_TOKEN'])


# noinspection PyUnusedLocal
def handler(event, context):
    # HTTPS client with certificate verification; SigV4-signed requests.
    es = Elasticsearch(hosts=[{'host': elasticsearch_host, 'port': 443}],
                       http_auth=aws_auth,
                       use_ssl=True,
                       verify_certs=True,
                       connection_class=RequestsHttpConnection)

    logger.info(f"Connected to Elasticsearch at https://{elasticsearch_host}")

    ilo = curator.IndexList(es)
Exemplo n.º 14
0
# Script fragment: SigV4 auth setup for AWS; the Elasticsearch wiring below
# is commented out. NOTE(review): the active AWS4Auth targets service 'sqs'
# while the commented alternative targets 'es' — confirm which is intended.
import os
import requests
import json
import time
from requests_aws4auth import AWS4Auth
from elasticsearch import Elasticsearch, RequestsHttpConnection
from config import *
import sys
import boto3


REGION = "us-west-2"

# Keys come from config (star import): YOUR_ACCESS_KEY / YOUR_SECRET_KEY.
awsauth = AWS4Auth(YOUR_ACCESS_KEY, YOUR_SECRET_KEY, REGION, 'sqs')
# awsauth = AWS4Auth(YOUR_ACCESS_KEY, YOUR_SECRET_KEY, "us-east-1", 'es')

# host = os.environ['ES_URL']
# port = os.environ['ES_PORT']

# es = Elasticsearch(
#   hosts=[{
#     'host': host,
#     'port': int(port),
#   }],
#   http_auth=awsauth,
#   use_ssl=True,
#   verify_certs=True,
#   connection_class=RequestsHttpConnection
#   )

# # es.indices.delete(index='news', ignore=[400, 404])
Exemplo n.º 15
0
# Script: LINE chat-bot backed by an AWS Elasticsearch domain (Python 2 —
# note the u'' literals). Credentials/tokens come from a local 'nocheckin'
# module kept out of version control. NOTE(review): this excerpt is
# truncated — responseToToken() has no body beyond a commented-out line.
from datetime import datetime
from requests_aws4auth import AWS4Auth
from awsconfig import ESHOST, REGION
from lineTools import getBotHeader, getUserDisplayName
from nocheckin import aws_access_key_id, aws_secret_access_key, XLineToken, happyrunXLineToken, botannXLineToken, botyunyunXLineToken, botpmXLineToken, botjhcXLineToken
from blackList import badfriends, badwords

# Minimum ES relevance score for a reply to be considered a match.
min_score = 1.5

#host = 'search-sandyai-mdmcmay32zf36sgmk66tz2v454.us-east-1.es.amazonaws.com'
host = ESHOST
region = REGION

# LINE user id of the bot owner/administrator.
bossid = u'Uc9b95e58acb9ab8d2948f8ac1ee48fad'

awsauth = AWS4Auth(aws_access_key_id, aws_secret_access_key, region, 'es')

# HTTPS Elasticsearch client with SigV4 signing.
es = Elasticsearch(hosts=[{
    'host': host,
    'port': 443
}],
                   http_auth=awsauth,
                   use_ssl=True,
                   verify_certs=True,
                   connection_class=RequestsHttpConnection)

# Messages that put the bot into "learn" mode.
learn_triggers = ['590590', u'小安學', u'小安 學']


def responseToToken(replyToken, resp, botid=''):
    #    try:
Exemplo n.º 16
0
# Python 2 script ('print res' below): copies product documents from MongoDB
# into an AWS Elasticsearch domain, skipping ids already indexed.
# SECURITY NOTE(review): a live-looking AWS access key and secret are
# hardcoded in the Elasticsearch client below — revoke these keys and load
# credentials from the environment or an AWS credential provider instead.
# NOTE(review): excerpt is truncated — the for-loop body breaks off below.
# mongo connection
connection = pymongo.MongoClient(
    'mongodb://ec2-52-35-150-57.us-west-2.compute.amazonaws.com:27017/shopping'
)
db = connection.Shopping
p = db.products
count = p.count()

es = Elasticsearch(hosts=[{
    'host':
    'search-shopping-6r2azz6jp5nou4futskmr6rysq.us-west-2.es.amazonaws.com',
    'port': 443
}],
                   http_auth=AWS4Auth(
                       'AKIAJU6K7JYBLFSLSYVA',
                       '9R3gm87XOt78LaXPZ3hXlBThPyLQL/JnmakJZ1mM', 'us-west-2',
                       'es'),
                   use_ssl=True,
                   verify_certs=True,
                   connection_class=RequestsHttpConnection)

print(es.info())
l = []
cnt = 1
prod = {}

# Walk product ids 1..count-1; ignore=[404] returns found=False for misses.
for i in range(1, count):
    res = es.get(index="shopping", doc_type='products', id=i, ignore=[404])
    print res
    if res['found'] == False:
        doc = p.find_one({'_id': i})
Exemplo n.º 17
0
def lambda_handler(event, context):
    """Parse a gzipped ELB access log dropped into S3 and bulk-index its
    lines into Elasticsearch, enriching each entry with ELB tags, GeoIP
    and user-agent data. Deletes the S3 object once processed.

    Relies on module-level config/helpers defined elsewhere in this file:
    role_mapping, log_fields, logs_per_request, logger, geo_lookup,
    ua_lookup. (Python 2 code: xrange/StringIO.)
    """
    bucket = event['Records'][0]['s3']['bucket']['name']
    key = event['Records'][0]['s3']['object']['key']

    # Variables from Lambda Environment (credentials are KMS-encrypted).
    region = os.environ['REGION']
    access_key = boto3.client('kms').decrypt(
        CiphertextBlob=b64decode(os.environ['ACCESS_KEY']))['Plaintext']
    secret_key = boto3.client('kms').decrypt(
        CiphertextBlob=b64decode(os.environ['SECRET_KEY']))['Plaintext']
    url = 'https://%s/_bulk' % os.environ['ES_ENDPOINT']

    # Auth header for Elastic Search
    auth = AWS4Auth(
        access_key,
        secret_key,
        region,
        'es',
    )

    # Get variables from S3 key. Raw string fixes the invalid escape
    # sequences (\d, \.) the original non-raw pattern relied on.
    key_fields = re.search(r'.*(\d{12})_.*app\.(.+?)\..*(\d{8})T', key)
    account = key_fields.group(1)
    logger.info('Account number derived from S3 Key: %s' % account)
    elb_name = key_fields.group(2)
    logger.info('ELB name derived from S3 Key: %s' % elb_name)
    index_date = '%s.%s.%s' % (key_fields.group(3)[0:4],
                               key_fields.group(3)[4:6],
                               key_fields.group(3)[6:8])
    logger.info('index_date derived from S3 Key: %s' % index_date)

    # Attempt to get tags from the load balancer (best-effort: continue
    # without tags on any failure).
    try:
        # Get authentication token for the account/role this elb is in
        sts_client = boto3.client('sts')
        assumedRoleObject = sts_client.assume_role(
            RoleArn=role_mapping % account,
            RoleSessionName="AssumeRoleSession")
        credentials = assumedRoleObject['Credentials']
        elb_client = boto3.client(
            'elbv2',
            aws_access_key_id=credentials['AccessKeyId'],
            aws_secret_access_key=credentials['SecretAccessKey'],
            aws_session_token=credentials['SessionToken'])
        load_balancer = elb_client.describe_load_balancers(Names=[elb_name])
        lb_arn = load_balancer['LoadBalancers'][0]['LoadBalancerArn']
        tag_result = elb_client.describe_tags(ResourceArns=[lb_arn])
        lb_tags = tag_result['TagDescriptions'][0]['Tags']
    except Exception as e:
        logger.warning('Problem getting tags for ELB: %s' % e)
        logger.warning('Account: %s' % account)
        lb_tags = []

    # Retrieve the logfile from S3
    s3 = boto3.resource('s3')
    obj = s3.Object(bucket, key)
    compressed_contents = obj.get()['Body'].read()
    log_txt = gzip.GzipFile(fileobj=StringIO(compressed_contents)).read()

    # Chunk logfile into pieces for bulk submission to ES
    log_list = log_txt.split('\n')
    log_chunk = list(log_list[i:i + logs_per_request]
                     for i in xrange(0, len(log_list), logs_per_request))

    # Create ES index string that prepends each of the document lines for
    # bulk operation
    index_dict = {
        'index': {
            '_index': 'logstash-%s' % index_date,
            '_type': 'elblogs'
        }
    }
    session = requests.Session()
    geo_ip_cache = {}  # per-invocation cache: client IP -> geo fields
    ua_cache = {}      # per-invocation cache: user-agent -> parsed fields
    logs_inserted = 0
    for chunk in log_chunk:
        bulk_txt = ''
        reader = csv.DictReader(chunk, log_fields.keys(), delimiter=' ')
        for item in reader:
            try:
                # Fix target_status_code if set to -
                if item['target_status_code'] == '-':
                    item['target_status_code'] = -1
                # Set data types properly for items
                for k, v in item.items():
                    if k in log_fields:
                        item[k] = log_fields[k](v)
                cp = item['client:port'].split(':')
                if len(cp) == 2:
                    item['c-ip'] = cp[0]
                    item['c-port'] = int(cp[1])
                    #item['c-ip'], item['c-port'] = cp
                sp = item['target:port'].split(':')
                if len(sp) == 2:
                    item['s-ip'] = sp[0]
                    item['s-port'] = int(sp[1])
                    #item['s-ip'], item['s-port'] = sp
                cm = item['request'].split()
                if len(cm) == 3:
                    (item['cs-method'], item['cs-uri'],
                     item['cs-protocol-version']) = cm
                us = urlsplit(item['cs-uri'])
                if us.path:
                    item['cs-uri-stem'] = us.path
                else:
                    item['cs-uri-stem'] = '-'
                if us.query:
                    item['cs-uri-query'] = us.query
                else:
                    item['cs-uri-query'] = '-'
                for tag in lb_tags:
                    item['tag.%s' % tag['Key']] = tag['Value']
                item['aws-account'] = account

                if item['c-ip'] not in geo_ip_cache:
                    geo_ip_cache[item['c-ip']] = geo_lookup(item['c-ip'])
                item.update(geo_ip_cache[item['c-ip']])
                if item['cs(User-Agent)'] not in ua_cache:
                    ua_cache[item['cs(User-Agent)']] = ua_lookup(
                        item['cs(User-Agent)'])
                item.update(ua_cache[item['cs(User-Agent)']])
                item['type'] = 'elblogs'
            except Exception as e:
                logger.warning(
                    'Problem adding additional fields to messages: %s' % e)
            bulk_txt += '%s\n%s\n' % (json.dumps(index_dict), json.dumps(item))
            logs_inserted += 1
        amz_date = datetime.datetime.utcnow().strftime('%Y%m%dT%H%M%SZ')
        headers = {
            'Content-Type': 'application/x-ndjson',
            'X-Amz-Date': amz_date
        }
        # Dummy continue to skip posting into ES
        if not bulk_txt:
            continue
        response = session.post(url, data=bulk_txt, headers=headers, auth=auth)
        try:
            response.raise_for_status()
        except Exception as e:
            # Fix: log the failure details, then re-raise. The original code
            # raised first, which made these warnings unreachable dead code.
            logger.warning('Error Reponse Code Received: %s',
                           response.status_code)
            logger.warning('Text: %s', response.content)
            logger.warning('This message was not inserted into elasticsearch')
            raise
        else:
            logger.info('Log Event sent to Elastic Search')
    session.close()
    logger.info('%s lines in log file %s' % (len(log_list), key))
    logger.info('Lines inserted into elasticsearch: %s' % logs_inserted)
    obj.delete()
Exemplo n.º 18
0
        outfile.write(
            '{ "index" : { "_index" : "tweets", "_type" : "tweet" } }\n')
        json.dump(new_json, outfile)
        outfile.write('\n')
        es.index(index="tweets", doc_type="tweet", body=new_json)
    return


#################aws setup start########################################
# Interactive setup: AWS keys are read from stdin, then used to build a
# SigV4-signed HTTPS Elasticsearch client.
host = 'search-matts-db1-dj6yl5sm7jj5pvdthdjg5czx6e.us-west-1.es.amazonaws.com'  # For example, my-test-domain.us-east-1.es.amazonaws.com
region = 'us-west-1'  # e.g. us-west-1

service = 'es'
access_key = input("enter AWS access key\n")
secret_key = input("enter AWS secret key\n")
awsauth = AWS4Auth(access_key, secret_key, region, service)

es = Elasticsearch(hosts=[{
    'host': host,
    'port': 443
}],
                   http_auth=awsauth,
                   use_ssl=True,
                   verify_certs=True,
                   connection_class=RequestsHttpConnection)
#################aws setup end##########################################

#################tweepy setup start#####################################
# SECURITY NOTE(review): Twitter API consumer key/secret and access token
# are hardcoded below — revoke them and load from the environment instead.
consumer_key = 'Bxk5A1a0K2p7Y3ZD5PY4qtp2d'
consumer_secret = 'DqCN4XmSYXhvTAUSl9eS3ul3tcOYXBkWjfT8ZqtjUMczapkGtc'
access_token = '858968490941718528-6C7TRZ6jhZAcxjC6jCmXWu9c0zzVLq5'
Exemplo n.º 19
0
# Script: periodically pulls electricity consumption (kWh) from the Danish
# Energinet "eloverblik" API and ships it to an AWS Elasticsearch domain.
# NOTE(review): excerpt is truncated — the while-loop body breaks off below.
from elasticsearch import Elasticsearch, RequestsHttpConnection
from datetime import timedelta, date
from requests_aws4auth import AWS4Auth
import os
import energinet
import es_utils
import settings
import time

host = 'search-homeelasticsearch-bsvbmdkhq4xi7fcchsp3grmjr4.eu-central-1.es.amazonaws.com'
# AWS credentials/region from environment variables.
AWS_ACCESS_KEY = os.getenv('AWS_ACCESS_KEY')
AWS_SECRET_KEY = os.getenv('AWS_SECRET_KEY')
AWS_REGION = os.getenv('AWS_REGION')
BASE_URL = 'https://api.eloverblik.dk/CustomerApi/api'
awsauth = AWS4Auth(AWS_ACCESS_KEY, AWS_SECRET_KEY, AWS_REGION, 'es')
# SigV4-signed HTTPS client with certificate verification.
es = Elasticsearch(hosts=[{
    'host': host,
    'port': 443
}],
                   http_auth=awsauth,
                   scheme='https',
                   use_ssl=True,
                   verify_certs=True,
                   connection_class=RequestsHttpConnection)

if __name__ == "__main__":
    while True:
        # Fetch yesterday's consumption (from two days ago to one day ago).
        token = energinet.get_token(BASE_URL)
        from_date = (date.today() - timedelta(days=2)).strftime("%Y-%m-%d")
        to_date = (date.today() - timedelta(days=1)).strftime("%Y-%m-%d")
        kwh = energinet.get_kwh(BASE_URL, token, from_date, to_date)
Exemplo n.º 20
0
def connect(
    host: str,
    port: Optional[int] = 443,
    boto3_session: Optional[boto3.Session] = None,
    region: Optional[str] = None,
    username: Optional[str] = None,
    password: Optional[str] = None,
) -> OpenSearch:
    """Create a secure connection to the specified Amazon OpenSearch domain.

    Note
    ----
    We use `opensearch-py <https://github.com/opensearch-project/opensearch-py>`_, an OpenSearch python client.

    The username and password are mandatory if the OS Cluster uses `Fine Grained Access Control \
<https://docs.aws.amazon.com/opensearch-service/latest/developerguide/fgac.html>`_.
    If fine grained access control is disabled, session access key and secret keys are used.

    Parameters
    ----------
    host : str
        Amazon OpenSearch domain, for example: my-test-domain.us-east-1.es.amazonaws.com.
    port : int
        OpenSearch Service only accepts connections over port 80 (HTTP) or 443 (HTTPS)
    boto3_session : boto3.Session(), optional
        Boto3 Session. The default boto3 Session will be used if boto3_session receive None.
    region :
        AWS region of the Amazon OS domain. If not provided will be extracted from boto3_session.
    username :
        Fine-grained access control username. Mandatory if OS Cluster uses Fine Grained Access Control.
    password :
        Fine-grained access control password. Mandatory if OS Cluster uses Fine Grained Access Control.

    Returns
    -------
    opensearchpy.OpenSearch
        OpenSearch low-level client.
        https://github.com/opensearch-project/opensearch-py/blob/main/opensearchpy/client/__init__.py
    """
    valid_ports = {80, 443}

    if port not in valid_ports:
        raise ValueError(f"results: port must be one of {valid_ports}")

    # Fix: the original default was `boto3_session=boto3.Session()`, which is
    # evaluated once at import time, so every caller shared one (potentially
    # stale) session. Create the default session lazily per call instead.
    if boto3_session is None:
        boto3_session = boto3.Session()

    if username and password:
        # Fine-grained access control: plain basic-auth tuple.
        http_auth = (username, password)
    else:
        # Fall back to SigV4 signing with the session's credentials.
        if region is None:
            region = _utils.get_region_from_session(
                boto3_session=boto3_session)
        creds = _utils.get_credentials_from_session(
            boto3_session=boto3_session)
        if creds.access_key is None or creds.secret_key is None:
            raise exceptions.InvalidArgument(
                "One of IAM Role or AWS ACCESS_KEY_ID and SECRET_ACCESS_KEY must be "
                "given. Unable to find ACCESS_KEY_ID and SECRET_ACCESS_KEY in boto3 "
                "session.")
        http_auth = AWS4Auth(creds.access_key,
                             creds.secret_key,
                             region,
                             "es",
                             session_token=creds.token)
    try:
        es = OpenSearch(
            host=_strip_endpoint(host),
            port=port,
            http_auth=http_auth,
            use_ssl=True,
            verify_certs=True,
            connection_class=RequestsHttpConnection,
            timeout=30,
            max_retries=10,
            retry_on_timeout=True,
        )
    except Exception as e:
        _logger.error(
            "Error connecting to Opensearch cluster. Please verify authentication details"
        )
        raise e
    return es
def lambda_handler(event, context):
    """Read an S3 access-log object (triggered by an S3 put event), parse it
    with s3logparse and index the parsed entry into Elasticsearch.

    Configuration (cloud_id, http auth, index name) is loaded from SSM
    Parameter Store under /<parent_stack_name>/..., falling back to plain
    environment variables on any error. Uses module-level `client` (SSM),
    `s3` and `xray_recorder` defined elsewhere in this file.

    NOTE(review): lines in the SSM section below were destroyed by a
    secret-scrubber ('******') in the source and have been reconstructed
    from the surrounding pattern — confirm against the original repository.
    NOTE(review): es_body below uses `log_entry` left over from the parse
    loop, i.e. only the LAST parsed line is indexed — presumably each log
    object holds a single line; confirm upstream.
    """
    ######################################################################
    # Create and Configure Python logging
    ######################################################################
    enable_logging = os.getenv('enable_logging')
    if enable_logging == 'True':
        enable_logging = True
        logging.Logger.disabled = False
    else:
        enable_logging = False
        logging.Logger.disabled = True

    # log = logging.getLogger("accesslogstoawscloud")
    log = logging.getLogger()
    log.setLevel(logging.DEBUG)
    # log.addHandler(handler)
    log.debug("Received event: " + json.dumps(event, indent=2))
    # print("Received event: " + json.dumps(event, indent=2))

    ######################################################################
    # Get all parameters containing credentials for this app
    #   If not -> user credentials from environment variables
    ######################################################################
    parent_stack_name = os.getenv('parent_stack_name')
    try:
        param_name = '/' + parent_stack_name + '/cloud_id'
        param_details = client.get_parameter(Name=param_name,WithDecryption=True)
        if 'Parameter' in param_details and len(param_details.get('Parameter')) > 0:
            parameter = param_details.get('Parameter')
            cloud_id = parameter.get('Value')
            log.info('cloud_id=' + cloud_id)

        param_name = '/' + parent_stack_name + '/http_auth_username'
        param_details = client.get_parameter(Name=param_name,WithDecryption=True)
        if 'Parameter' in param_details and len(param_details.get('Parameter')) > 0:
            parameter = param_details.get('Parameter')
            http_auth_username = parameter.get('Value')
            # Reconstructed from a redacted source line.
            log.info('http_auth_username=' + http_auth_username)

        param_name = '/' + parent_stack_name + '/http_auth_password'
        param_details = client.get_parameter(Name=param_name,WithDecryption=True)
        if 'Parameter' in param_details and len(param_details.get('Parameter')) > 0:
            parameter = param_details.get('Parameter')
            http_auth_password = parameter.get('Value')
            # Reconstructed from a redacted source line; never log the secret.
            log.info('http_auth_password retrieved from SSM')

        param_name = '/' + parent_stack_name + '/index_name'
        param_details = client.get_parameter(Name=param_name,WithDecryption=True)
        if 'Parameter' in param_details and len(param_details.get('Parameter')) > 0:
            parameter = param_details.get('Parameter')
            index_name = parameter.get('Value')
            log.info('index_name=' + index_name)

    except Exception:
        # Narrowed from a bare 'except:' so KeyboardInterrupt/SystemExit are
        # not swallowed; fall back to environment variables.
        log.debug("Encountered an error loading credentials from SSM.")
        traceback.print_exc()
        cloud_id = os.getenv('cloud_id')
        http_auth_username = os.getenv('http_auth_username')
        http_auth_password = os.getenv('http_auth_password')
        index_name = os.getenv('index_name')

    ######################################################################
    # Get the object from the event and show its content type
    ######################################################################
    bucket = event['Records'][0]['s3']['bucket']['name']
    key = urllib.parse.unquote_plus(event['Records'][0]['s3']['object']['key'], encoding='utf-8')
    try:
        response = s3.get_object(Bucket=bucket, Key=key)
        log.info("CONTENT TYPE: " + response['ContentType'])
    except Exception as e:
        log.debug('Error getting object {} from bucket {}. Make sure they exist and your bucket is in the same region as this function.'.format(key, bucket))
        log.debug(e)
        # print(e)
        # print('Error getting object {} from bucket {}. Make sure they exist and your bucket is in the same region as this function.'.format(key, bucket))
        raise e

    StreamingBody=response['Body']
    access_log=StreamingBody.read()

    ######################################################################
    # Example Access Log:
    ######################################################################
    # access_log='2279185f7619a617e0a834c7f0660e4b09ea7f842f9d768d39109ee6e4cdf522 bucket [20/Dec/2019:06:36:32 +0000] 174.65.125.92 arn:aws:sts::696965430234:assumed-role/AWSReservedSSO_AdministratorAccess_563d3ebb7af9cd35/[email protected] 6ED2206C36ABCD61 REST.GET.ACL object.mov "GET /bucket/object.mov?acl= HTTP/1.1" 200 - 550 - 277 - "-" "S3Console/0.4, aws-internal/3 aws-sdk-java/1.11.666 Linux/4.9.184-0.1.ac.235.83.329.metal1.x86_64 OpenJDK_64-Bit_Server_VM/25.232-b09 java/1.8.0_232 vendor/Oracle_Corporation" - eGkU7fkbpX9QOfaV1GDHSXQ9zVEokrE0KgIhdVMr63PbSCxWwZoEtr5GDbaDGr1/LFf9lTpiJ3U= SigV4 ECDHE-RSA-AES128-SHA AuthHeader s3-us-west-2.amazonaws.com TLSv1.2\n'
    log.info(f"access_log={access_log}\n")

    # s3logparse wants a file object, so spill the log to a temp file.
    f = NamedTemporaryFile(mode='w+', delete=False)
    f.write(str(access_log))
    f.close()
    # with open(f.name, "r") as new_f:
    #     print(new_f.read())

    with open(f.name, "r") as fh:
        for log_entry in s3logparse.parse_log_lines(fh.readlines()):
            log.info(log_entry)

    os.unlink(f.name) # delete the file after usage

    ######################################################################
    # Start the X-Ray sub-segment
    ######################################################################
    subsegment = xray_recorder.begin_subsegment('accesslogstoawscloud - send data to the Elasticsearch Service domain')
    subsegment.put_annotation('function', 'accesslogstoawscloud')
    xray_recorder.put_metadata("access_log", access_log)


    ##################################################################################################
    #Now put that data in ElasticCloud!
    ##################################################################################################
    host = os.environ.get('host')
    region = os.environ.get('ES_REGION')
    service = 'es'
    credentials = boto3.Session().get_credentials()
    awsauth = AWS4Auth(credentials.access_key, credentials.secret_key, region, service, session_token=credentials.token)
    # awsauth = AWS4Auth(YOUR_ACCESS_KEY, YOUR_SECRET_KEY, REGION, 'es')

    es = Elasticsearch(
        hosts=[{'host': host, 'port': 443}],
        http_auth=awsauth,
        use_ssl=True,
        verify_certs=True,
        connection_class=RequestsHttpConnection
    )
    log.info(es.info())

    # create an index in elasticsearch, ignore status code 400 (index already exists)
    # es.indices.create(index='accesslogstoawscloud', ignore=400)
    es.indices.create(index=index_name, ignore=400)
    # {'acknowledged': True, 'shards_acknowledged': True, 'index': 'my-index'}
    # datetimes will be serialized
    # es.index(index="my-index", id=44, body={"any": "data44", "timestamp": datetime.now()})

    es_body={
    "bucket_owner": log_entry.bucket_owner,
    "bucket": log_entry.bucket,
    "timestamp": log_entry.timestamp,
    "remote_ip": log_entry.remote_ip,
    "requester": log_entry.requester,
    "request_id": log_entry.request_id,
    "operation": log_entry.operation,
    "s3_key": log_entry.s3_key,
    "request_uri": log_entry.request_uri,
    "status_code": log_entry.status_code,
    "error_code": log_entry.error_code,
    "bytes_sent": log_entry.bytes_sent,
    "object_size": log_entry.object_size,
    "total_time": log_entry.total_time,
    "turn_around_time": log_entry.turn_around_time,
    "referrer": log_entry.referrer,
    "user_agent": log_entry.user_agent,
    "version_id": log_entry.version_id
    }

    es.index(index=index_name, body=es_body)



    ######################################################################
    # End the X-Ray sub-segment
    ######################################################################
    xray_recorder.end_subsegment()
def send_a_request(
    test_config,
    url=None,
    method=None,
    payload=None,
    extra_headers=None,
    content_type=None,
):
    """Send a SigV4-signed HTTP request to an API Gateway endpoint.

    Assumes the "sirius-ci" role when running in CI (detected via the CI
    environment variable), otherwise the "operator" role, signs the
    request with the resulting temporary credentials, and returns a
    ``(status_code, text)`` tuple from the response.
    """
    print(f"Using test_config: {test_config['name']}")

    # Default to JSON unless the caller overrides the content type.
    headers = {"Content-Type": content_type if content_type else "application/json"}
    for extra in (extra_headers or []):
        headers[extra["header_name"]] = extra["header_value"]

    body = json.dumps(payload) if payload else None

    # CI pipelines use a dedicated role; humans use "operator".
    role_name = "sirius-ci" if "CI" in os.environ else "operator"

    boto3.setup_default_session(region_name="eu-west-1")

    sts = boto3.client("sts")
    # Sanity check: fails fast if the default credentials are unusable.
    sts.get_caller_identity()["Account"]

    assumed = sts.assume_role(
        RoleArn=f"arn:aws:iam::288342028542:role/{role_name}",
        RoleSessionName="assumed_role",
    )

    session = Session(
        aws_access_key_id=assumed["Credentials"]["AccessKeyId"],
        aws_secret_access_key=assumed["Credentials"]["SecretAccessKey"],
        aws_session_token=assumed["Credentials"]["SessionToken"],
    )
    # Second sanity check, now under the assumed role.
    session.client("sts").get_caller_identity()["Account"]

    frozen = session.get_credentials().get_frozen_credentials()

    auth = AWS4Auth(
        frozen.access_key,
        frozen.secret_key,
        "eu-west-1",
        "execute-api",
        session_token=frozen.token,
    )

    response = requests.request(method, url, auth=auth, data=body, headers=headers)

    print(f"response.status_code: {response.status_code}")
    print(
        f"response: {json.dumps(response.json(), indent=4) if len(response.text) > 0 else ''}"
    )

    return response.status_code, response.text
Exemplo n.º 23
0
# Email stuff
# Gmail SMTP over STARTTLS (port 587); the account password is read from
# the environment so it never lands in version control.
EMAIL_HOST = 'smtp.gmail.com'
EMAIL_HOST_USER = '******'
EMAIL_HOST_PASSWORD = os.getenv('EMAIL_HOST_PASSWORD')
EMAIL_PORT = 587
EMAIL_USE_TLS = True
DEFAULT_FROM_EMAIL = '*****@*****.**'
SERVER_EMAIL = '*****@*****.**'

# Redirect HTTP to HTTPS
SECURE_SSL_REDIRECT = True

# Search settings
import elasticsearch
from requests_aws4auth import AWS4Auth
# SigV4 signer for the Amazon ES domain used by Haystack below.
# NOTE(review): AWS_ACCESS_KEY_ID / AWS_SECRET_ACCESS_KEY are assumed to be
# defined earlier in this settings module -- confirm.
awsauth = AWS4Auth(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, 'us-east-1', 'es')

HAYSTACK_CONNECTIONS = {
    'default': {
        'ENGINE':
        'haystack.backends.elasticsearch_backend.ElasticsearchSearchEngine',
        'URL':
        'https://search-advo-search-tdbbozoee2qmgiowsicqdjyfze.us-east-1.es.amazonaws.com',
        'INDEX_NAME': 'haystack',
        'KWARGS': {
            'port': 443,
            'http_auth': awsauth,
            'use_ssl': True,
            'verify_certs': True,
            'connection_class': elasticsearch.RequestsHttpConnection
        },
Exemplo n.º 24
0
def get_client(**kwargs):
    """
    Return an :class:`elasticsearch.Elasticsearch` client object using the
    provided parameters. Any of the keyword arguments the
    :class:`elasticsearch.Elasticsearch` client object can receive are valid,
    such as:

    :arg hosts: A list of one or more Elasticsearch client hostnames or IP
        addresses to connect to.  Can send a single host.
    :type hosts: list
    :arg port: The Elasticsearch client port to connect to.
    :type port: int
    :arg url_prefix: `Optional` url prefix, if needed to reach the Elasticsearch
        API (i.e., it's not at the root level)
    :type url_prefix: str
    :arg use_ssl: Whether to connect to the client via SSL/TLS
    :type use_ssl: bool
    :arg certificate: Path to SSL/TLS certificate
    :arg client_cert: Path to SSL/TLS client certificate (public key)
    :arg client_key: Path to SSL/TLS private key
    :arg aws_key: AWS IAM Access Key (Only used if the :mod:`requests-aws4auth`
        python module is installed)
    :arg aws_secret_key: AWS IAM Secret Access Key (Only used if the
        :mod:`requests-aws4auth` python module is installed)
    :arg aws_region: AWS Region (Only used if the :mod:`requests-aws4auth`
        python module is installed)
    :arg ssl_no_validate: If `True`, do not validate the certificate
        chain.  This is an insecure option and you will see warnings in the
        log output.
    :type ssl_no_validate: bool
    :arg http_auth: Authentication credentials in `user:pass` format.
    :type http_auth: str
    :arg timeout: Number of seconds before the client will timeout.
    :type timeout: int
    :arg master_only: If `True`, the client will `only` connect if the
        endpoint is the elected master node of the cluster.  **This option does
        not work if `hosts` has more than one value.**  It will raise an
        Exception in that case.
    :type master_only: bool
    :rtype: :class:`elasticsearch.Elasticsearch`
    :raises ConfigurationError: if both "host" and "hosts" are given, or if
        "master_only" is used with more than one host.
    :raises MissingArgument: if only some of the AWS credential kwargs are set.
    """
    # Normalize a null url_prefix (None or the literal string "None") to ''.
    if 'url_prefix' in kwargs:
        if kwargs['url_prefix'] is None or kwargs['url_prefix'] == "None":
            kwargs['url_prefix'] = ''
    if 'host' in kwargs and 'hosts' in kwargs:
        raise ConfigurationError(
            'Both "host" and "hosts" are defined.  Pick only one.')
    elif 'host' in kwargs and 'hosts' not in kwargs:
        # Accept singular "host" as an alias for "hosts".
        kwargs['hosts'] = kwargs.pop('host')
    # Fill in defaults for anything the caller did not provide.
    kwargs.setdefault('hosts', '127.0.0.1')
    kwargs.setdefault('master_only', False)
    kwargs.setdefault('use_ssl', False)
    kwargs.setdefault('ssl_no_validate', False)
    kwargs.setdefault('certificate', False)
    kwargs.setdefault('client_cert', False)
    kwargs.setdefault('client_key', False)
    kwargs['hosts'] = ensure_list(kwargs['hosts'])
    logger.debug("kwargs = {0}".format(kwargs))
    # master_only is ours, not Elasticsearch's -- remove it before the call.
    master_only = kwargs.pop('master_only')
    if kwargs['use_ssl']:
        if kwargs['ssl_no_validate']:
            kwargs[
                'verify_certs'] = False  # Not needed, but explicitly defined
        else:
            logger.debug('Attempting to verify SSL certificate.')
            # If user provides a certificate:
            if kwargs['certificate']:
                kwargs['verify_certs'] = True
                kwargs['ca_certs'] = kwargs['certificate']
            else:  # Try to use certifi certificates:
                try:
                    import certifi
                    kwargs['verify_certs'] = True
                    kwargs['ca_certs'] = certifi.where()
                except ImportError:
                    logger.warn('Unable to verify SSL certificate.')
    try:
        from requests_aws4auth import AWS4Auth
        kwargs.setdefault('aws_key', False)
        kwargs.setdefault('aws_secret_key', False)
        kwargs.setdefault('aws_region', False)
        # BUG FIX: this previously tested kwargs['region'], a key that is
        # never set, so any AWS-credentialed call raised KeyError.
        if kwargs['aws_key'] or kwargs['aws_secret_key'] or kwargs['aws_region']:
            # BUG FIX: the original condition lacked parentheses
            # (not a and b and c), so incomplete credentials slipped
            # through whenever aws_key was present.  All three values
            # must be supplied together.
            if not (kwargs['aws_key'] and kwargs['aws_secret_key']
                    and kwargs['aws_region']):
                raise MissingArgument(
                    'Missing one or more of "aws_key", "aws_secret_key", '
                    'or "aws_region".')
            # Override these kwargs
            kwargs['use_ssl'] = True
            kwargs['verify_certs'] = True
            kwargs['connection_class'] = elasticsearch.RequestsHttpConnection
            kwargs['http_auth'] = AWS4Auth(kwargs['aws_key'],
                                           kwargs['aws_secret_key'],
                                           kwargs['aws_region'], 'es')
        else:
            logger.debug('"requests_aws4auth" module present, but not used.')
    except ImportError:
        logger.debug('Not using "requests_aws4auth" python module to connect.')

    if master_only:
        if len(kwargs['hosts']) > 1:
            raise ConfigurationError(
                '"master_only" cannot be True if more than one host is '
                'specified. Hosts = {0}'.format(kwargs['hosts']))
    try:
        client = elasticsearch.Elasticsearch(**kwargs)
        # Verify the version is acceptable.
        check_version(client)
        # Verify "master_only" status, if applicable
        check_master(client, master_only=master_only)
        return client
    except Exception as e:
        raise elasticsearch.ElasticsearchException(
            'Unable to create client connection to Elasticsearch.  '
            'Error: {0}'.format(e))
Exemplo n.º 25
0
if __name__ == "__main__":

    urllib3.disable_warnings()
    aligo_configuration = get_aligo_configuration()

    # Pricing API is only available us-east-1
    client = boto3.client('pricing', region_name='us-east-1')
    accounting_log_path = '/var/spool/pbs/server_priv/accounting/'
    # Change PyTZ as needed
    tz = pytz.timezone('America/Los_Angeles')
    session = boto3.Session()
    credentials = session.get_credentials()
    awsauth = AWS4Auth(credentials.access_key,
                       credentials.secret_key,
                       session.region_name,
                       'es',
                       session_token=credentials.token)
    es_endpoint = 'https://' + aligo_configuration['ESDomainEndpoint']
    es = Elasticsearch([es_endpoint],
                       port=443,
                       http_auth=awsauth,
                       use_ssl=True,
                       verify_certs=True,
                       connection_class=RequestsHttpConnection)

    pricing_table = {}
    management_chain_per_user = {}
    json_output = []
    output = {}
    "Region": 'us-east-1',
    "headers": {
        "content-type": "application/json"
    },
    "geoip_headers": {
        'accept': "application/json",
        'content-type': "application/json"
    }
}

# Resolve credentials for the given static key pair via a boto3 session.
# NOTE(review): access_key / secret_key are assumed to be defined earlier
# in this file (outside the visible chunk) -- confirm.
creds = boto3.Session(aws_access_key_id=access_key,
                      aws_secret_access_key=secret_key).get_credentials()

# SigV4 signer for the Amazon ES service in us-east-1.
awsauth = AWS4Auth(creds.access_key,
                   creds.secret_key,
                   'us-east-1',
                   'es',
                   session_token=creds.token)


class GetGeo():
    def __init__(self, logDocTrimmed_dict, cip):
        self.LogDocTrimmed_dict = logDocTrimmed_dict
        self.cip = cip
        try:
            geo_url = "https://freegeoip.app/json" + \
                '/' + self.cip
            geo_resp = requests.request("GET",
                                        geo_url,
                                        headers=params.get("geoip_headers"))
def lambda_handler(event, context):
    """Fulfil a queued dining-suggestion request.

    Polls one message from the DiningQueue SQS queue, searches the
    Elasticsearch "restaurants" index for the requested cuisine, enriches
    up to three matches from the "yelp-restaurants" DynamoDB table, and
    texts the suggestions to the requester's phone number via SNS.

    Returns a dict with ``statusCode`` 200 and the message body, or 404
    when the queue is empty.
    """
    sqs_client = boto3.client('sqs')

    try:
        # polls a message from the SQS queue
        response = sqs_client.receive_message(
            QueueUrl='https://sqs.us-east-1.amazonaws.com/974283779235/DiningQueue',
            MessageAttributeNames=['All'],
            MaxNumberOfMessages=1
        )

        sqs_client.delete_message(
            QueueUrl='https://sqs.us-east-1.amazonaws.com/974283779235/DiningQueue',
            ReceiptHandle=response['Messages'][0]['ReceiptHandle']
        )
    except KeyError:
        # receive_message omits the 'Messages' key when the queue is empty.
        return {
            'statusCode': 404,
            'body': "Empty SQS Queue."
        }

    # this is the data we need
    restaurant_requirements = response['Messages'][0]['MessageAttributes']
    cuisine_type = restaurant_requirements['Cuisine']['StringValue'].lower()
    people_number = restaurant_requirements["PeopleNumber"]["StringValue"]
    time = restaurant_requirements["DiningTime"]["StringValue"]
    phone_number = restaurant_requirements["PhoneNumber"]["StringValue"]

    host = 'search-nyc-restaurants-3xe6tomp3hbigddvvoja4qvmxq.us-east-1.es.amazonaws.com'
    region = 'us-east-1'
    service = 'es'
    credentials = boto3.Session().get_credentials()
    awsauth = AWS4Auth(credentials.access_key, credentials.secret_key, region, service, session_token=credentials.token)

    es = Elasticsearch(
        hosts = [{'host': host, 'port': 443}],
        http_auth = awsauth,
        use_ssl = True,
        verify_certs = True,
        connection_class = RequestsHttpConnection
    )

    result = es.search(index="restaurants", body={"query": {"match":{"cuisine": cuisine_type}}})
    restaurants_that_match_cuisine_type = result['hits']['hits']

    dynamodb_client = boto3.client('dynamodb')
    restaurant_data = []

    # get info on up to the first three results returned by elasticsearch
    # BUG FIX: the original iterated over [0, 1, 2] unconditionally and
    # raised IndexError whenever fewer than three restaurants matched.
    for hit in restaurants_that_match_cuisine_type[:3]:
        res_id = hit['_id']
        res_details = dynamodb_client.get_item(
            TableName='yelp-restaurants',
            Key={'id':{'S':str(res_id)}}
        )
        restaurant_data.append(res_details)

    text_message = "Hello! Here are my " + cuisine_type + " restaurant suggestions for "
    text_message += people_number + " people for today at " + time + ":\n"

    for r in restaurant_data:
        name = r['Item']['name']['S']
        address = r['Item']['address']['S']
        text_message += name + ", located at " + address +"\n"

    text_message += "Enjoy your meal!"

    sns_client = boto3.client('sns')

    response = sns_client.publish(
        PhoneNumber="+" + phone_number,
        Message=text_message,
    )

    return {
        'statusCode': 200,
        'body': text_message
    }
Exemplo n.º 28
0
def main():
    """Publish the rendered workshop content structure to AppSync.

    Reads the Hugo-generated index.json for each language declared in
    config.toml, converts the Python repr of the collected structures to
    JavaScript-style JSON, assumes the GraphQL execution role, queries the
    latest version for this workshop, and sends a createContent mutation.

    Required environment variables: RT_REGION, WORKSHOP_NAME,
    GQL_ENDPOINT, GQL_ROLE.
    """
    # Setup environment
    region = os.environ['RT_REGION']
    workshop_name = os.environ['WORKSHOP_NAME']
    gql_endpoint = os.environ['GQL_ENDPOINT']
    gql_assume_role = os.environ['GQL_ROLE']
    # for now content handle only workshop, in future will be added delivery
    content_id = workshop_name
    content_structures = []

    # Analyze languages in toml file; single-language sites have no
    # per-language subdirectory in ./public.
    dict_toml = toml.load(open('./config.toml'))
    number_of_languages = len(dict_toml['Languages'])
    for i, lang in enumerate(dict_toml['Languages']):
        print('Load JSON structure\n')
        if number_of_languages == 1:
            index_location = "./public/index.json"
        else:
            index_location = "./public/" + lang + "/index.json"
        with open(index_location, 'r') as j:
            current_structure = json.load(j)
        print('Finished Load JSON\n')
        content_structures.append({"language": lang, "structure": current_structure})

    # Change json format from Python to Javascript because GraphQL can only recognize Javascript type of Json object.
    # Javascript Json object {key: "value"} ; Python Json object {"key":"value"}
    # TODO: Json_keys is just hardcoded. It should find keys in JSON object.
    json_keys = ["language", "structure", "pageTitle", "relativePagePath"]
    structures_str = str(content_structures)
    for key in json_keys:
        target_key = "\'" + key + "\'"
        structures_str = structures_str.replace(target_key, key)
    structures_str = structures_str.replace("\'", "\"")

    # AssumeRole to obtain temporary credentials for the AppSync call.
    sts_client = boto3.client('sts')
    assume_role = sts_client.assume_role(
        RoleArn=gql_assume_role,
        RoleSessionName='GraphQLExecuter'
        )
    access_key_id = assume_role['Credentials']['AccessKeyId']
    secret_access_key = assume_role['Credentials']['SecretAccessKey']
    session_token = assume_role['Credentials']['SessionToken']

    auth = AWS4Auth(access_key_id, secret_access_key, region, 'appsync', session_token=session_token)

    # Load latest version
    # BUG FIX: the payload previously opened with {"query":"""" -- a stray
    # leading '"' inside the triple-quoted string that made the GraphQL
    # document invalid.
    body = {"query": """
                    query ListContents{
                    listContents(
                            hostName: "%s",
                            limit: 1,
                            sortDirection: DESC
                    ) {
                            items{
                        hostName
                        version
                        }
                    }
                    }
                    """ % workshop_name
            }

    body_json = json.dumps(body)
    method = 'POST'
    headers = {}
    response = requests.request(method, gql_endpoint, auth=auth, data=body_json, headers=headers)
    print(response.content.decode('utf-8'))
    gql_data = json.loads(response.content.decode('utf-8'))['data']['listContents']['items'][0]
    current_version = gql_data['version']

    # Send latest version
    # BUG FIX: stray leading '"' removed here as well.
    body = {"query": """
            mutation createContent{
            createContent(input:{
                hostName: "%s"
                version: %d
                structures: %s
            }){
                version
            }
            }
            """ % (content_id, current_version, structures_str)
    }
    body_json = json.dumps(body)
    response = requests.request(method, gql_endpoint, auth=auth, data=body_json, headers=headers)

    print('Finished sending sructure\n')

    print('Post build phase done\n')
Exemplo n.º 29
0
def lambda_handler(event, context):
    """Bulk-load CloudTrail records from a gzipped S3 object into Elasticsearch.

    Triggered by S3 put events.  Digest files are skipped.  Records are
    grouped into chunks of ``logs_per_request`` (module-level constant) and
    POSTed to the cluster's _bulk endpoint, one daily logstash-* index per
    record's eventTime date.  Failed chunks are logged, not retried.
    """
    bucket = event['Records'][0]['s3']['bucket']['name']
    key = event['Records'][0]['s3']['object']['key']
    host = os.environ['HOST']
    region = os.environ['REGION']
    index_date = re.search('.*([0-9]{8})T.*', key).group(1)
    url = 'https://%s/_bulk' % host
    if 'Digest' in key:
        logger.info("Digest File... Skipping")
        return
    s3 = boto3.resource('s3')
    obj = s3.Object(bucket, key)
    compressed_contents = obj.get()['Body'].read()
    # CloudTrail objects are gzip-compressed; 16 + MAX_WBITS selects gzip
    # framing for zlib.
    contents = zlib.decompress(compressed_contents, 16 + zlib.MAX_WBITS)
    records = json.loads(contents)["Records"]
    # BUG FIX: xrange is Python 2 only; range works on Python 3.
    logchunk = list(records[i:i + logs_per_request]
                    for i in range(0, len(records), logs_per_request))
    credentials = boto3.Session().get_credentials()
    auth = AWS4Auth(credentials.access_key,
                    credentials.secret_key,
                    region,
                    'es',
                    session_token=credentials.token)

    # (A dead pre-loop index_dict assignment was removed here: it was
    # always rebuilt per item before use inside the loop below.)
    session = requests.Session()
    for chunk in logchunk:
        bulk_txt = ''
        for item in chunk:
            try:
                logging.info('Sending event to elasticsearch')
                item["@timestamp"] = item["eventTime"]
                item["eventSource"] = item["eventSource"].split(".")[0]
                index_date = item["eventTime"].split("T")[0].replace("-", ".")
                index_dict = {
                    'index': {
                        '_index': 'logstash-%s' % index_date,
                        '_type': 'cloudtrail'
                    }
                }
                bulk_txt += '%s\n%s\n' % (json.dumps(index_dict),
                                          json.dumps(item))
            except Exception as e:
                logger.warning('Unable to process log entry: %s' % item)
                logger.warning('Exception: %s' % e)
        amz_date = datetime.datetime.utcnow().strftime('%Y%m%dT%H%M%SZ')
        headers = {
            'Content-Type': 'application/x-ndjson',
            'X-Amz-Date': amz_date
        }
        response = session.post(url, data=bulk_txt, headers=headers, auth=auth)
        try:
            response.raise_for_status()
        except Exception as e:
            logger.warning('Error Reponse Code Received: %s',
                           response.status_code)
            logger.warning('Reponse Text: %s', response.content)
            logger.warning('This message was not inserted into elasticsearch')
        else:
            logger.info('Log Event sent to Elastic Search')
    session.close()
Exemplo n.º 30
0
import boto3
import requests
from requests_aws4auth import AWS4Auth

region = 'cn-north-1'  # e.g. us-east-1
service = 'es'
# Sign requests with the Lambda execution role's temporary credentials.
credentials = boto3.Session().get_credentials()
awsauth = AWS4Auth(credentials.access_key, credentials.secret_key,
                   region, service, session_token=credentials.token)

# NOTE(review): host is empty -- it must be set to the Amazon ES domain
# endpoint (including https://) before this module can run.
host = ''  # the Amazon ES domain, with https://
index = 'lambda-index'
type = 'lambda-type'  # NOTE: shadows the built-in `type`; kept for compatibility
# Base document URL: {host}/{index}/{type}/{doc_id}
url = host + '/' + index + '/' + type + '/'

headers = {"Content-Type": "application/json"}


def handler(event, context):
    """Replicate DynamoDB stream records into Elasticsearch.

    REMOVE events delete the matching document; every other event type
    upserts the record's NewImage as the document body, keyed by the
    table's `recordId` attribute.
    """
    # NOTE(review): `count` is initialised but never incremented or
    # returned in the visible code -- the original example was likely
    # truncated here; confirm against upstream.
    count = 0
    for record in event['Records']:
        # Get the primary key for use as the Elasticsearch ID
        id = record['dynamodb']['Keys']['recordId']['S']

        if record['eventName'] == 'REMOVE':
            r = requests.delete(url + id, auth=awsauth)
        else:
            document = record['dynamodb']['NewImage']
            print(document)
            r = requests.put(url + id, auth=awsauth,
                             json=document, headers=headers)
Exemplo n.º 31
0
    def process(self, count, payload, id):
        """Send a buffered audio clip to Amazon Lex and play back the reply.

        Clips shorter than ``clip_min_frames`` are discarded.  In debug
        logging mode the clip is also written to ./recordings as a WAV
        file.  The Lex response audio is played back to the connection and
        transcript/sentiment analytics are POSTed to ``webhook_url`` when
        one is configured.

        :param count: number of buffered frames in this clip
        :param payload: raw PCM audio bytes
        :param id: connection identifier (also keys the global ``conns`` map)
        """
        if count > self.clip_min_frames:  # If the buffer is less than CLIP_MIN_MS, ignore it
            # if 1: (to create recordings, folder ./recordings must exist)
            if logging.getLogger(
            ).level == 10:  # if we're in Debug then save the audio clip
                fn = "{}rec-{}-{}.wav".format(
                    './recordings/', id,
                    datetime.datetime.now().strftime("%Y%m%dT%H%M%S"))
                output = wave.open(fn, 'wb')
                # mono, 16-bit samples, uncompressed PCM at self.rate
                output.setparams(
                    (1, 2, self.rate, 0, 'NONE', 'not compressed'))
                output.writeframes(payload)
                output.close()
                debug('File written {}'.format(fn))
            auth = AWS4Auth(self._aws_id,
                            self._aws_secret,
                            self._aws_region,
                            'lex',
                            unsign_payload=True)

            info(">>> auth:")
            info(auth)

            info('Processing {} frames for {}'.format(str(count), id))
            endpoint = 'https://runtime.lex.{}.amazonaws.com{}'.format(
                self._aws_region, self._path)
            info(endpoint)
            if self.rate == 16000:
                headers = {
                    'Content-Type': 'audio/l16; channels=1; rate=16000',
                    'Accept': 'audio/pcm'
                }
            elif self.rate == 8000:
                headers = {
                    'Content-Type':
                    'audio/lpcm; sample-rate=8000; sample-size-bits=16; channel-count=1; is-big-endian=false',
                    'Accept': 'audio/pcm'
                }
            else:
                # BUG FIX: the original used a printf-style "%" placeholder
                # with str.format, so the rate was never interpolated.
                info("Unsupported Sample Rate: {}".format(self.rate))
            # Prepare a signed request only to harvest its SigV4 headers,
            # then POST the audio with those headers.
            req = requests.Request('POST',
                                   endpoint,
                                   auth=auth,
                                   headers=headers)
            prepped = req.prepare()
            info(prepped.headers)
            r = requests.post(endpoint, data=payload, headers=prepped.headers)
            info(r.headers)

            self.customer_transcript = r.headers.get(
                'x-amz-lex-input-transcript')
            self.bot_transcript = r.headers.get('x-amz-lex-message')
            self.session_id = r.headers.get('x-amz-lex-session-id')

            if (r.headers.get('x-amz-lex-sentiment')):
                # Sentiment arrives base64-encoded JSON in a response header.
                self.customer_sentiment = b64decode(
                    r.headers['x-amz-lex-sentiment']).decode('ascii')

                self.analytics_raw = {
                    "customer_transcript": str(self.customer_transcript),
                    "bot_transcript": str(self.bot_transcript),
                    "customer_sentiment": json.loads(self.customer_sentiment),
                    "session_id": self.session_id,
                    "client_id": self.client_id,
                    "service": "Amazon Lex"
                }

            else:
                self.customer_sentiment = "Sentiment analysis is not enabled on this Lex bot or customer_transcript is empty"

                self.analytics_raw = {
                    "customer_transcript": str(self.customer_transcript),
                    "bot_transcript": str(self.bot_transcript),
                    "customer_sentiment": self.customer_sentiment,
                    "session_id": self.session_id,
                    "client_id": self.client_id,
                    "service": "Amazon Lex"
                }

            self.analytics = json.dumps(self.analytics_raw)

            info(self.analytics)

            # Posting to analytics server
            if (self.webhook_url):
                a = requests.post(self.webhook_url,
                                  data=self.analytics,
                                  headers={'Content-Type': 'application/json'})

            self.playback(r.content, id)
            # Lex can ask us to hang up via a base64-encoded session attribute.
            if r.headers.get('x-amz-lex-session-attributes'):
                if json.loads(
                        b64decode(
                            r.headers['x-amz-lex-session-attributes'])).get(
                                'nexmo-close'):
                    conns[id].close()
        else:
            info('Discarding {} frames'.format(str(count)))
Exemplo n.º 32
0
 def __init__(self, stack_resources, testing_env_variables):
     """Store test configuration and build a SigV4 signer for API Gateway."""
     self.env_vars = testing_env_variables
     self.stack_resources = stack_resources
     env = testing_env_variables
     self.auth = AWS4Auth(
         env['ACCESS_KEY'], env['SECRET_KEY'], env['REGION'], 'execute-api')
Exemplo n.º 33
0
import os

import boto3
from elasticsearch import Elasticsearch, RequestsHttpConnection
from requests_aws4auth import AWS4Auth

# Target Elasticsearch host; defaults to a local instance for development.
host = os.getenv("ES_HOST", "localhost")

if host == "localhost":
    # Local development: unauthenticated client on the default port.
    ES = Elasticsearch()
else:
    # AWS deployment: sign requests with the ambient session credentials.
    credentials = boto3.Session().get_credentials()
    awsauth = AWS4Auth(
        credentials.access_key,
        credentials.secret_key,
        os.getenv("AWS_DEFAULT_REGION", "us-east-1"),
        "es",
        session_token=credentials.token,
    )
    ES = Elasticsearch(
        hosts=[{
            "host": host,
            "port": 443
        }],
        http_auth=awsauth,
        use_ssl=True,
        verify_certs=True,
        connection_class=RequestsHttpConnection,
    )

ES.index(
Exemplo n.º 34
0
from elasticsearch import Elasticsearch, exceptions, RequestsHttpConnection
import requests
from requests_aws4auth import AWS4Auth

WORKERS = 10
API_URL = "http://gateway-a.watsonplatform.net/calls/text/TextGetTextSentiment"
# SECURITY NOTE(review): the API token and AWS key pair below are hardcoded
# in source.  They should be considered compromised -- revoke/rotate them
# and load credentials from the environment or an IAM role instead.
API_TOKEN = "0ac5fb44df7c0b67834d33197cb4117472020536"
QUEUE_NAME = "tweetsQueue"
WAIT_TIME = 10  # time to wait between each SQS poll
TOPIC_NAME = "tweet-topic"
SNS_ARN = "arn:aws:sns:us-east-1:648564116187:tweet-topic"
aws_access_key_id = "AKIAIXLMABEUDHWWG2KA"
aws_secret_access_key = "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"
REGION = "us-east-1"

# SigV4 signer for the Amazon ES domain below.
awsauth = AWS4Auth(aws_access_key_id, aws_secret_access_key, REGION, 'es')
sqs = boto3.resource('sqs')
queue = sqs.get_queue_by_name(QueueName=QUEUE_NAME)
sns = boto3.client('sns')

es = Elasticsearch(hosts=[{
    'host':
    'search-tweet-qtio2avcrkgm4l6svwa2kfu3b4.us-east-1.es.amazonaws.com',
    'port': 443
}],
                   use_ssl=True,
                   http_auth=awsauth,
                   verify_certs=True,
                   connection_class=RequestsHttpConnection)

Exemplo n.º 35
0
from elasticsearch import Elasticsearch,RequestsHttpConnection
from requests_aws4auth import AWS4Auth
import boto3
import json
import logging
logger = logging.getLogger()
logger.setLevel(logging.INFO)

# Sign Elasticsearch requests with the Lambda execution role's credentials.
session = boto3.Session()
credentials = session.get_credentials()
access_key = credentials.access_key
secret_key = credentials.secret_key
region = 'us-east-1'

# Amazon ES domain endpoint for the photo-search index.
host = 'search-proj-s32olh4w37jhobjg6e45qcm6ca.us-east-1.es.amazonaws.com'
awsauth = AWS4Auth(access_key,secret_key, region, 'es',session_token=credentials.token)

def lambda_handler(event,context):
    # resp = {
    #     'headers': {
    #         "Access-Control-Allow-Origin": "*",
    #         "Content-Type": "text/plain"
    #     },
        
    #     'statusCode': 200,
    #     'body': event["queryStringParameters"]
        
    # }
    
    # return resp
    es = Elasticsearch(
 def test_encode_body_bytes(self):
     """A bytes body passes through encode_body untouched, and no
     content-type header is added."""
     payload = b'hello'
     self.req.body = payload
     AWS4Auth.encode_body(self.req)
     self.assertEqual(self.req.body, payload)
     self.assertEqual(self.req.headers, {})
Exemplo n.º 37
0
import json
import datetime
import time
import sys

from elasticsearch import Elasticsearch
from elasticsearch import Elasticsearch, RequestsHttpConnection
from requests_aws4auth import AWS4Auth

# Configuration code for AWS and elastic search
# NOTE(review): the credentials below are blank -- fill them in or switch
# to boto3 session / IAM-role credentials before running.
AWS_ACCESS_KEY = ''
AWS_SECRET_KEY = ''
region = 'us-east-2'  # For example, us-east-1
service = 'es'

awsauth = AWS4Auth(AWS_ACCESS_KEY, AWS_SECRET_KEY, region, service)

# Amazon ES domain endpoint for the tweet-sentiment index.
host = 'search-tweetsentiment-qgnbjonsbxhe6v4btn5chvrpgy.us-east-2.es.amazonaws.com'

#ElasticSearch object
es = Elasticsearch(hosts=[{
    'host': host,
    'port': 443
}],
                   http_auth=awsauth,
                   use_ssl=True,
                   verify_certs=True,
                   connection_class=RequestsHttpConnection,
                   timeout=10)

# Rebuild from scratch: ignore 400/404 if the index does not exist yet.
es.indices.delete(index='tweet_sentiment', ignore=[400, 404])
Exemplo n.º 38
0
def lambda_handler(event, context):
    """Search photos by natural-language query.

    Sends the query text (``event['q']``) to the 'SearchPhotos' Lex bot to
    extract label keywords, then matches each keyword against the 'photos'
    Elasticsearch index and collects {"url", "labels"} entries.

    Returns a dict {"results": [...]} (empty when Lex is not ready for
    fulfillment).
    """
    # NOTE(review): the original snippet's indentation was mangled (every
    # statement after post_text sat at call-continuation depth) and the
    # host assignment was a syntax error; structure reconstructed here.
    bot = boto3.client('lex-runtime', region_name='us-east-1')

    text = event['q']

    ###### for lex #####
    response = bot.post_text(
        botName='SearchPhotos',
        botAlias='$LATEST',
        userId='test',
        sessionAttributes={'id': 'user1'},
        requestAttributes={},
        inputText=text,
    )
    keywords = []

    #### for elastic search ####
    host = ''  # For example, my-test-domain.us-east-1.es.amazonaws.com
    region = 'us-east-2'  # e.g. us-west-1
    service = 'es'
    # credentials = boto3.Session().get_credentials()
    # awsauth = AWS4Auth(credentials.access_key, credentials.secret_key, region, service)

    # NOTE(review): my_access_key / my_secret_key are not defined in this
    # module; they must come from configuration. Prefer the session-based
    # credentials commented out above.
    awsauth = AWS4Auth(my_access_key, my_secret_key, region, service)
    es = Elasticsearch(
        hosts=[{'host': host, 'port': 443}],
        http_auth=awsauth,
        use_ssl=True,
        verify_certs=True,
        connection_class=RequestsHttpConnection,
    )

    final_response = []
    if response['dialogState'] == 'ReadyForFulfillment':
        keywords.append(response['slots']['labelOne'])
        if response['slots']['labelTwo']:
            keywords.append(response['slots']['labelTwo'])
        print(keywords)

        # search for index
        results = []
        for key in keywords:
            body = {"query": {"match": {"labels": key}}}
            search_result = es.search(index="photos", doc_type="_doc",
                                      body=body)
            results.append(search_result)

        # Take the i-th hit of the i-th result.
        # NOTE(review): pairing result i with hit index i looks suspicious
        # (hit 0 of each result seems more likely intended) — confirm.
        for i, result in enumerate(results):
            source = result['hits']['hits'][i]['_source']
            final_response.append({
                "url": "/" + source['objectKey'],
                "labels": source['labels'],
            })
        print(final_response)

    # The mangled original built this dict but never returned it; return it
    # so the Lambda actually produces a response.
    return {"results": final_response}
# Exemplo n.º 39 (snippet separator; vote count "0" from the scrape)
from __future__ import print_function
from elasticsearch import Elasticsearch, helpers, RequestsHttpConnection
from requests_aws4auth import AWS4Auth

import json
import logging
logger = logging.getLogger()
logger.setLevel(logging.INFO)

# SECURITY NOTE(review): AWS credentials are hard-coded here; they should
# be revoked and replaced with credentials resolved from the environment
# or the Lambda execution role (boto3.Session().get_credentials()).
awsauth = AWS4Auth("AKIAIATAPMMA73TOWOVA",
                   "qCrRf8GKiJBlNuMnzIaAYBZ9aTAy/PsoqqkI9Crk", 'us-east-1',
                   'es')
# Client for the Amazon Elasticsearch Service domain, authenticated with
# the SigV4 signer above.
es = Elasticsearch(hosts=[{
    'host':
    "search-twittmap-for-tory-crewiplf7dscv7b6caobny7wui.us-east-1.es.amazonaws.com",
    'port': 443
}],
                   http_auth=awsauth,
                   use_ssl=True,
                   verify_certs=True,
                   connection_class=RequestsHttpConnection,
                   timeout=10)


def lambda_handler(event, context):
    """Index the SNS message carried by *event* into the 'tweets' index.

    Logs the message payload, then writes it as document id 1 — each
    invocation overwrites the previous document.
    """
    # The Lambda event is already a dict, so read the SNS message directly
    # instead of round-tripping it through json.dumps/json.loads.
    message = event['Records'][0]['Sns']['Message']
    logger.info("value latitude: " + message)
    logger.info(es.index(index="tweets", doc_type='tweet', id=1, body=message))
 def test_encode_body_unicode_to_bytes(self):
     """ASCII unicode text with no content-type must be UTF-8 encoded
     and tagged with a text/plain UTF-8 content-type header."""
     self.req.body = u('hello')
     AWS4Auth.encode_body(self.req)
     self.assertEqual(b'\x68\x65\x6c\x6c\x6f', self.req.body)
     self.assertEqual('text/plain; charset=utf-8',
                      self.req.headers['content-type'])
def search(request):
    """Run a keyword search against the 'textract' Elasticsearch index.

    request keys:
        elasticsearchDomain -- ES domain host to query (required)
        keyword             -- term matched against document content (optional)
        documentId          -- restrict results to a single document (optional)

    Returns the request unchanged when no keyword is given the list of
    result summaries otherwise — or a single summary dict when a hit's id
    equals request['documentId']. (With no keyword the function returns
    None, matching the original control flow.)
    """
    host = request["elasticsearchDomain"]
    keyword = request.get("keyword")
    document_id = request.get("documentId")

    output = request

    if keyword is not None:
        search_body = {"must": [{"match": {"content": keyword}}]}

        if document_id is not None:
            search_body["must"].append(
                {"match_phrase": {
                    "documentId": document_id
                }})

        # Sign requests with the caller's (temporary) AWS credentials.
        service = 'es'
        ss = boto3.Session()
        credentials = ss.get_credentials()
        region = ss.region_name

        awsauth = AWS4Auth(credentials.access_key,
                           credentials.secret_key,
                           region,
                           service,
                           session_token=credentials.token)

        print(search_body)

        es = Elasticsearch(hosts=[{
            'host': host,
            'port': 443
        }],
                           http_auth=awsauth,
                           use_ssl=True,
                           verify_certs=True,
                           connection_class=RequestsHttpConnection)
        # filter_path trims the response to just the fields used below.
        output = es.search(index='textract',
                           doc_type="document",
                           body={"query": {
                               "bool": search_body
                           }},
                           filter_path=[
                               'hits.hits._id', 'hits.hits._source.content',
                               'hits.hits._source.name',
                               'hits.hits._source.bucket'
                           ])

        if "hits" in output:
            # Subnested hits list of the ES response.
            hits = output["hits"]["hits"]
            results = []

            for hit in hits:
                doc_id = hit["_id"]  # renamed from 'id' (shadowed builtin)
                source = hit["_source"]
                lines_context = context(keyword, source["content"])
                joined = ' ...'.join(lines_context)
                obj = {
                    "documentId": doc_id,
                    "name": source["name"],
                    "bucket": source["bucket"],
                    "count": contextCounts(keyword, source["content"]),
                    "brief": joined[:400],
                    "lines": lines_context
                }
                results.append(obj)
                # When the caller asked for a specific document, return just
                # that document's summary (original used i = len(hits) to
                # stop the manual while loop; break is equivalent).
                if request.get('documentId') == doc_id:
                    results = obj
                    break

            output = results

        return output
 def test_encode_body_utf8_string_to_bytes(self):
     """Non-ASCII unicode text must be UTF-8 encoded and tagged with a
     text/plain UTF-8 content-type header."""
     self.req.body = u('☃')
     AWS4Auth.encode_body(self.req)
     self.assertEqual(b'\xe2\x98\x83', self.req.body)
     self.assertEqual('text/plain; charset=utf-8',
                      self.req.headers['content-type'])
import csv
from elasticsearch import helpers, Elasticsearch, RequestsHttpConnection
import io
import pycallnumber as pycn
from requests_aws4auth import AWS4Auth
import yaml

# Resolve AWS credentials from the default boto3 session.
# NOTE(review): boto3 is not imported in this snippet — confirm it is
# imported earlier in the file.
credentials = boto3.Session().get_credentials()

s3 = boto3.client('s3')

# read a configuration file
# SECURITY NOTE(review): yaml.load without an explicit Loader can execute
# arbitrary Python embedded in the file; prefer yaml.safe_load for plain
# configuration data.
with open("prod_config.yml", 'r') as stream:
    config = yaml.load(stream)

# SigV4 signer built from the config's region/service settings.
awsauth = AWS4Auth(credentials.access_key, credentials.secret_key,
                   config.get('region'), config.get('service'))

# Client for the ES domain endpoint named in the config.
es = Elasticsearch(hosts=[{
    'host': config.get('eshost'),
    'port': 443
}],
                   http_auth=awsauth,
                   use_ssl=True,
                   verify_certs=True,
                   connection_class=RequestsHttpConnection)


def run(event, context):
    bucket = event['Records'][0]['s3']['bucket']['name']
    key = event['Records'][0]['s3']['object']['key']
    # need to get the file from S3