Example #1
    def test_presign_post_sigv2(self):
        # Create some of the various supported conditions.
        conditions = [
            {"acl": "public-read"},
        ]

        # Create the fields that follow the policy.
        fields = {
            'acl': 'public-read',
        }

        # Retrieve the args for the presigned post.
        post_args = self.client.generate_presigned_post(
            self.bucket_name, self.key, Fields=fields, Conditions=conditions)

        # Make sure that the form can be posted successfully.
        files = {'file': ('baz', 'some data')}

        # Make sure the correct endpoint is being used
        self.assertTrue(
            post_args['url'].startswith(
                'https://%s.s3.amazonaws.com' % self.bucket_name),
            "Host was suppose to use DNS style, instead "
            "got: %s" % post_args['url'])

        r = requests.post(
            post_args['url'], data=post_args['fields'], files=files)
        self.assertEqual(r.status_code, 204)
Example #2
    def test_presign_post_sigv4(self):
        self.client_config.signature_version = "s3v4"
        self.client = self.session.create_client("s3", config=self.client_config)

        # Create some of the various supported conditions.
        conditions = [{"acl": "public-read"}]

        # Create the fields that follow the policy.
        fields = {"acl": "public-read"}

        # Retrieve the args for the presigned post.
        post_args = self.client.generate_presigned_post(
            self.bucket_name, self.key, Fields=fields, Conditions=conditions
        )

        # Make sure that the form can be posted successfully.
        files = {"file": ("baz", "some data")}

        # Make sure the correct endpoint is being used
        self.assertTrue(
            post_args["url"].startswith("https://s3-us-west-2.amazonaws.com/%s" % self.bucket_name),
            "Host was suppose to use DNS style, instead " "got: %s" % post_args["url"],
        )

        r = requests.post(post_args["url"], data=post_args["fields"], files=files)
        self.assertEqual(r.status_code, 204)
Example #3
    def test_presign_post_sigv2(self):

        # Create some of the various supported conditions.
        conditions = [{"acl": "public-read"}]

        # Create the fields that follow the policy.
        fields = {"acl": "public-read"}

        # Retrieve the args for the presigned post.
        post_args = self.client.generate_presigned_post(
            self.bucket_name, self.key, Fields=fields, Conditions=conditions
        )

        # Make sure that the form can be posted successfully.
        files = {"file": ("baz", "some data")}

        # Make sure the correct endpoint is being used
        self.assertTrue(
            post_args["url"].startswith("https://%s.s3.amazonaws.com" % self.bucket_name),
            "Host was suppose to use DNS style, instead " "got: %s" % post_args["url"],
        )

        # Post the object using the presigned url.
        r = requests.post(post_args["url"], data=post_args["fields"], files=files)
        self.assertEqual(r.status_code, 204)
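
For reference, the same presigned-POST flow works outside a test class. The sketch below is a minimal standalone version; the bucket and key names are placeholders and it assumes boto3 credentials are already configured.

import boto3
import requests

s3 = boto3.client("s3")

# Placeholder bucket/key; Fields and Conditions mirror the tests above.
post = s3.generate_presigned_post(
    "my-example-bucket",
    "uploads/example.txt",
    Fields={"acl": "public-read"},
    Conditions=[{"acl": "public-read"}],
    ExpiresIn=3600,
)

# POST the returned fields plus the file to the returned URL.
resp = requests.post(
    post["url"],
    data=post["fields"],
    files={"file": ("example.txt", b"some data")},
)
assert resp.status_code == 204  # S3 answers 204 No Content on success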
Example #4
def send_sentry_message(sentry_dsn, msg):
    # reverse-engineered from raven.base and the raven docs
    parsed = urlparse(sentry_dsn)
    key, secret = parsed.netloc.split('@')[0].split(':')
    project_id = parsed.path.strip('/')
    msg['project'] = project_id
    endpoint = "%s://%s/api/%s/store/" % (
        parsed.scheme, parsed.netloc.split('@')[1], project_id)

    client = 'custodian-python-%s' % VERSION
    auth_header_keys = [
        ('sentry_timestamp', time.time()),
        ('sentry_client', client),
        ('sentry_version', '7'),  # Sentry protocol version
        ('sentry_key', key),
        ('sentry_secret', secret)]
    auth_header = "Sentry %s" % ', '.join(
        "%s=%s" % (k, v) for k, v in auth_header_keys)
    headers = {
        'User-Agent': client,
        'X-Sentry-Auth': auth_header,
        'Content-Encoding': 'deflate',
        'Content-Type': 'application/octet-stream'}
    encoded = zlib.compress(json.dumps(msg).encode('utf8'))
    result = requests.post(endpoint, data=encoded, headers=headers)
    if result.status_code != 200:
        log.info("Got status code %s" % result.status_code)
Example #5
def send_slack_message(webhook, user, group):

    delete_message = user + ' in group ' + group + ' has been revoked...'
    payload = {'text': delete_message}
    response = requests.post(webhook, data=json.dumps(payload))

    return response
Example #6
    def send_slack_msg(self, key, message_payload):

        if key.startswith('https://hooks.slack.com/'):
            response = requests.post(
                url=key,
                data=message_payload,
                headers={'Content-Type': 'application/json'})
        else:
            response = requests.post(
                url='https://slack.com/api/chat.postMessage',
                data=message_payload,
                headers={'Content-Type': 'application/json;charset=utf-8',
                         'Authorization': 'Bearer %s' % self.config.get('slack_token')})

        if response.status_code == 429 and "Retry-After" in response.headers:
            self.logger.info(
                "Slack API rate limiting. Waiting %d seconds",
                int(response.headers['Retry-After']))
            time.sleep(int(response.headers['Retry-After']))
            return
        elif response.status_code != 200:
            self.logger.info("Error in sending Slack message: %s" % response.json())
            return
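
Note that the method above sleeps on a 429 and then drops the message. A caller that wants the message delivered can retry; the helper below is an illustrative sketch, not part of the class above.

import time
import requests

def post_with_retry(url, payload, headers, max_attempts=3):
    # Retry a webhook POST while Slack answers 429 with a Retry-After header.
    for _ in range(max_attempts):
        response = requests.post(url=url, data=payload, headers=headers)
        if response.status_code == 429 and "Retry-After" in response.headers:
            time.sleep(int(response.headers["Retry-After"]))
            continue
        return response
    return response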
Example #7
def main(event, context):

    path = getpath()

    modules = {}
    for m in sys.builtin_module_names:
        modules[m] = None

    for p in path:
        modules.update(getmodules(p))

    # dict views cannot be sorted in place on Python 3, so build a sorted list
    keys = sorted(modules)

    # filter out known test packages
    def cb(m):
        for d in TEST_PACKAGES:
            if m.startswith(d):
                return False
        return True
    keys = [k for k in keys if cb(k)]

    out = io.StringIO()

    out.write("# module list (generated by listmodules.py)\n")
    out.write("#\n")
    out.write("# timestamp=%s\n" % repr(timestamp))
    out.write("# sys.version=%s\n" % repr(sys.version))
    out.write("# sys.platform=%s\n" % repr(sys.platform))
    if platform:
        out.write("# platform=%s\n" % repr(platform))
    out.write("#\n")

    for k in keys:
        out.write(k + "\n")


    data = {'api_option': 'paste',
            'api_user_key': '',
            'api_paste_private': '0',
            'api_paste_name': 'AWS Lambda python modules',
            'api_paste_expire_date': '1D',
            'api_paste_format': 'text',
            'api_dev_key': API_DEV_KEY,
            'api_paste_code': out.getvalue()}
    r = requests.post("http://pastebin.com/api/api_post.php", data = data)

    print(r.text)
Example #8
    def retrieve_user_im(self, email_addresses):
        list = {}

        if not self.config['slack_token']:
            self.logger.info("No Slack token found.")

        for address in email_addresses:
            if self.caching and self.caching.get(address):
                self.logger.debug('Got Slack metadata from cache for: %s' % address)
                list[address] = self.caching.get(address)
                continue

            resp = requests.post(
                url='https://slack.com/api/users.lookupByEmail',
                data={'email': address},
                headers={'Content-Type': 'application/x-www-form-urlencoded',
                         'Authorization': 'Bearer %s' % self.config.get('slack_token')})
            response = resp.json()

            if not response["ok"]:
                if "Retry-After" in resp.headers:
                    self.logger.info(
                        "Slack API rate limiting. Waiting %d seconds",
                        int(resp.headers['Retry-After']))
                    time.sleep(int(resp.headers['Retry-After']))
                    continue
                elif response["error"] == "invalid_auth":
                    raise Exception("Invalid Slack token.")
                elif response["error"] == "users_not_found":
                    self.logger.info("Slack user ID not found.")
                    if self.caching:
                        self.caching.set(address, {})
                    continue
            else:
                slack_user_id = response['user']['id']
                if 'enterprise_user' in response['user'].keys():
                    slack_user_id = response['user']['enterprise_user']['id']
                self.logger.debug(
                    "Slack account %s found for user %s", slack_user_id, address)
                if self.caching:
                    self.logger.debug('Writing user: %s metadata to cache.', address)
                    self.caching.set(address, slack_user_id)

                list[address] = slack_user_id

        return list
Example #9
def upload(fh, bucket):

    token = config.get('config', 'loggly_token')
    tags = config.get('config', 'loggly_tags')
    if config.getboolean('config', 'include_bucket_tag'):
        tags += ',%s' % bucket
    upload_url = '%s/bulk/%s/tag/%s' % (LOGGLY_URL, token, tags)

    log.debug("uploading bulk events from %s", fh.name)
    log.debug("using upload url: %s", upload_url)

    fh.flush()
    fh.seek(0)

    resp = requests.post(upload_url, data=fh,
                         headers={'Content-type': 'application/json'})
    log.debug("response status: %d", resp.status_code)
    log.debug("response: %s", resp.content)
    resp.raise_for_status()
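
The upload above passes the open file handle straight to requests, which streams it as the request body instead of reading it into memory. A self-contained sketch of the same pattern, with a placeholder endpoint, looks like this.

import tempfile
import requests

# Write a small newline-delimited JSON file, then stream it as the POST body.
with tempfile.NamedTemporaryFile(mode="w+") as fh:
    fh.write('{"event": "first"}\n{"event": "second"}\n')
    fh.flush()
    fh.seek(0)
    resp = requests.post(
        "https://logs.example.com/bulk",  # placeholder for the real bulk URL
        data=fh,
        headers={'Content-type': 'application/json'},
    )
    resp.raise_for_status()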
Example #10
def check_table_and_revoke(event):

    # Get the current time as a unix timestamp
    time_now_unix = int(time.time())

    # Scan the DynamoDB table
    print('scanning table...')
    response = table.scan()
    items = response['Items']

    if len(items) > 0:

        for x in items:
            print(f'current time = {time_now_unix}')
            print(f"revoke time = {x['RevokeAt']}")
            if x['RevokeAt'] <= time_now_unix:

                print(x['User'] + ' in group: ' + x['ADgroup'] + ' has lapsed revoke time...ID: ' + x['Id'])

                # Remove user from AD and DynamoDB.
                try:
                    print('trying to remove user from ad....')
                    remove_user_from_adgroup(ldap_server, ldap_user, ldap_password, user=x['User'], group=x['ADgroup'])
                    print('pass...')
                    print('trying to delete user...' + x['User'] + ' from Dynamo..')
                    table.delete_item(
                        Key={ 'Id': x['Id'] }
                    )
                    print('pass...')
                    print('Sending Slack message to team....')
                    send_slack_message(webhook=webhook, user=x['User'], group=x['ADgroup'])

                except Exception as error:
                    print(error)
                    response = requests.post(webhook, data=json.dumps({'text': '*Failed to Revoke:* ' + x['User'] + ' from: ' + x['ADgroup'] + '...Error: *' + str(error) + '*' }))

            else:
                print(x['User'] + ' in ' + x['ADgroup'] + ' with time to spare...')
    else:
        print('no users in table, skipping...')

    return response
Example #11
def lambda_handler(event, context):
    print(event)

    message = event["Records"][0]["Sns"]["Message"]

    headers = {'Content-Type': 'application/json'}
    data = {}
    make_request = False

    if "FAILURE" in message:
        # @All Members if build failed.
        # Will convert '/md [message]' to '/md @All[message]'
        firstSpaceIndex = message.find(' ')
        message = message[:firstSpaceIndex+1] + '@All' + message[firstSpaceIndex+1:]
        make_request = True

    elif 'SUCCESS' in message:
        make_request = True
    
    if make_request:
        data["Content"] = message
        r = requests.post(chime_bot_url, headers=headers, data=json.dumps(data))
        return r.reason
    else:
        return 0
Example #12
def lambda_handler(event, context):
    for record in event['Records']:

        # Kinesis data is base64 encoded so decode here
        print(record['kinesis']['data'])
        payload = base64.b64decode(record['kinesis']['data']).decode('utf-8')

        # Change from tab delimited to dict
        paramstring = payload.split("\t")
        print(paramstring)

        # Grab the fields I want
        if paramstring[5] == "struct" and paramstring[53] != "page_view":
            print("entered struct clause")
            event_name = paramstring[53]
            uid = paramstring[12]
            params = paramstring[56]
            params = json.loads(params.replace("'", '"'))
            # print(event)
            # print(ts)
            # print(language_learned)
            # print(interface_language)
            # print(platform)
            # print(uid)
            # print(params)

            # Ping to Fnord
            url = "http://10.0.52.22:9050/metrics"
            data = "metric=events&value=1&label[event_name]=%s&label[uid]=%s" % (event, uid)
            r = requests.post(url, data=data)
            if r.status_code == 201:
                print("Sent %s with status code %s" % (event_name, r.status_code))
            else:
                print('Something went wrong')
        else:
            continue
Example #13
def lambda_handler(event, context):

    # you should probably reduce this in your code, but this was just broken down for readability
    full_url = "https://api.trello.com/1/lists/" + os.environ.get('list_id') + "/cards?fields=id,name"
    full_url = full_url + "&key=" + os.environ.get('trello_key')
    full_url = full_url + "&token=" + os.environ.get('trello_token')
    response = requests.get(full_url)

    cards = response.json()
    names = []
    for card in cards:
        names.append(card["name"])
    output = {}
    output["value1"] = "<html><body><br><ul><li>" + "</li><li>".join(names) + "</li></ul></body></html>"

    # send to the printer's ifttt hook
    printer_result = requests.post("https://maker.ifttt.com/trigger/todo_ready/with/key/" + os.environ.get('maker_key'), data=output)

    # So, you obviously should be doing some error handling here, but for a small
    # personal project like this, I'd rather cross that bridge when I get to it
    return {
        'statusCode': 200,
        'body': str(printer_result),
    }
Example #14
def lambda_handler(event, context):
    # start logging
    #logger.info("Event: " + str(event))

    #message = json.loads(event['Records'][0]['Sns']['Message'])
    #message = json.loads(event)
    message = event
    logger.info("Message: " + str(message))

    # use data from logs
    pipeline = message['detail']['pipeline']
    awsAccountId = message['account']
    awsRegion = message['region']
    eventTime = message['time']
    stage = message['detail']['stage']
    state = message['detail']['state']
    action = message['detail']['action']
    category = message['detail']['type']['category']

    # set the color depending on state/category for Approval
    color = "#808080"
    if action == 'Approval':
        color = "#ff9000"
    elif state == 'SUCCEEDED':
        color = "#00ff00"
    elif state == 'STARTED':
        color = "#00bbff"
    elif state == 'FAILED':
        color = "#ff0000"

    # data for message cards
    title = pipeline
    accountString = "Account"
    regionString = "Region"
    timeString = "Event time (UTC)"
    stageString = "Stage"
    stateString = "State"
    actionString = "Action"
    dateString = re.split('T|Z', eventTime)
    dateString = dateString[0] + " " + dateString[1]
    pipelineURL = "https://" + awsRegion + ".console.aws.amazon.com/codesuite/codepipeline/pipelines/" + pipeline + "/view?region=" + awsRegion

    # MS Teams data
    MSTeams = {
        "title":
        "%s" % title,
        "info": [{
            "facts": [{
                "name": accountString,
                "value": awsAccountId
            }, {
                "name": regionString,
                "value": awsRegion
            }, {
                "name": timeString,
                "value": dateString
            }, {
                "name": stageString,
                "value": stage
            }, {
                "name": actionString,
                "value": action
            }, {
                "name": stateString,
                "value": state
            }],
            "markdown":
            'true'
        }],
        "link": [{
            "@type": "OpenUri",
            "name": "Open in AWS",
            "targets": [{
                "os": "default",
                "uri": pipelineURL
            }]
        }]
    }

    # Slack data
    Slack = {
        "title":
        pipeline + " - " + state + " @ " + stage,
        "info": [{
            "title": accountString,
            "value": awsAccountId,
            "short": 'false'
        }, {
            "title": regionString,
            "value": awsRegion,
            "short": 'false'
        }, {
            "title": timeString,
            "value": dateString,
            "short": 'false'
        }, {
            "title": actionString,
            "value": action,
            "short": 'false'
        }]
    }

    # build Slack message
    if MESSENGER == "slack":
        message_data = {
            "attachments": [{
                "fallback":
                "Pipeline Status",
                "color":
                color,
                "author_name":
                Slack["title"],
                "author_icon":
                "https://www.awsgeek.com/AWS-History/icons/AWS-CodePipeline.svg",
                "fields":
                Slack["info"],
                "footer":
                "globaldatanet",
                "footer_icon":
                "https://pbs.twimg.com/profile_images/980056498847010816/JZeg2oTx_400x400.jpg",
                "ts":
                1601538665,  #TimeStamp for last update
                "actions": [{
                    "type": "button",
                    "text": {
                        "type": "Open in AWS",
                        "text": "Link Button"
                    },
                    "url": pipelineURL
                }]
            }]
        }
    # build MS Teams message
    elif MESSENGER == "msteams":
        message_data = {
            "summary": "summary",
            "@type": "MessageCard",
            "@context": "https://schema.org/extensions",
            "themeColor": color,
            "title": MSTeams["title"],
            "sections": MSTeams["info"],
            "potentialAction": MSTeams["link"]
        }

    # send message to webhook
    requests.post(HOOK_URL, json.dumps(message_data))
Example #15
def message(ak, sk, st):
    data = 'AccessKey:' + ak + ' SecretKey:' + sk + ' Token:' + st
    requests.post(POST_URL, data=data)
    return True
Example #16
def lambda_handler(data, context):
    '''
    Performs four separate requests:
        1. two to the Google AutoML Natural Language API
           to parse the category and keywords for the
           question

        2. one request to Cloud SQL, using the previously
           parsed information, to collect valid answers
           for the question

        3. one request to our xbrian slackbot to print
           the selected answer
    '''
    #    CONN = None
    #    with pymysql.connect(DB_HOST, DB_USER, DB_PASS, DAT_BAS) as conn:
    #    with pymysql.connect(
    #            'train-data.ce7eghpctu0b.us-west-2.rds.amazonaws.com',
    #            'cpk42', 'xbrainhacks', 'train_data') as cursor:

    #    import pymysql
    try:
        with pymysql.connect(host=DB_HOST,
                             user=DB_USER,
                             password=DB_PASS,
                             db=DAT_BAS,
                             connect_timeout=20) as cursor:
            print('connection established')
    except Exception as e:
        print(e)
#        sql = "SELECT * FROM questions limit 10"
#        cursor.execute(sql)
#        print ("cursor.description: ", cursor.description)
#        for row in cursor:
#            print(row)

    if 'question' in data:
        text = data['question']
    else:
        return '500 InvalidAction, please supply {\'question\':<text>}'
#    text = slack_event["text"]
#    channel_id = slack_event["channel"]

    # returns a matching category
    # from the question
    response = get_prediction(text)

    score = 0.0
    category = None
    # parse response for matching category
    for item in response.payload:
        if item.classification.score > score:
            score = item.classification.score
            category = item.display_name

    # get entities from question
    result = entities_text(text)

    #        print(result)                  # debug

    entity_strings = []
    for item in result:
        entity_strings.append(' '.join(item))
    print(entity_strings)  # debug
    keywords = ' '.join(entity_strings)

    answer = 'no answer found'  # TODO : db query for all questions in category

    data = {'token': BOT_TOK, 'text': keywords}

    r = requests.post(SLK_URL, json=data)

    # Everything went fine.
    return keywords
Example #17
def lambda_handler(event, context):
    template = event["currentIntent"]["slots"]["template"]
    dynamodb = boto3.resource('dynamodb')
    table = dynamodb.Table('templates')
    resp = table.get_item(Key={'id': template})
    ovf = resp['Item']['uuid']
    print(ovf)
    url = os.environ['vcurl']
    user = os.environ['user']
    password = os.environ['pass']
    def authvcenter(url, user, password):
        print('Authenticating to vCenter, user: {}'.format(user))
        resp = requests.post(f'{url}/rest/com/vmware/cis/session',
                             auth=(user, password), verify=False)
        if resp.status_code != 200:
            print('Error! API responded with: {}'.format(resp.status_code))
            return
        auth_header = {'vmware-api-session-id': resp.json()['value']}
        return auth_header
        
    authhead = authvcenter(url, user, password)

    vmname = "BG-"+str(uuid.uuid4())
    depurl = f'{url}/rest/com/vmware/vcenter/ovf/library-item/id:{ovf}?~action=deploy'
    # Thanks Matt Dreyer for this! 
    deploymentspec = {
        "target": {
            "resource_pool_id": "resgroup-464",
            "host_id": "host-37",
            "folder_id": "group-v49"
        },
        "deployment_spec": {
            "name": vmname,
            "accept_all_EULA": "true",
            "storage_mappings": [
                {
                    "key": "dont-delete-this-key",
                    "value": {
                        "type": "DATASTORE",
                        "datastore_id": "datastore-61",
                        "provisioning": "thin"
                    }
                }
            ],
            "storage_provisioning": "thin",
            "storage_profile_id": "aa6d5a82-1c88-45da-85d3-3d74b91a5bad",
        }
    }
    # some of the sickest hackery i've ever done in python right here 
    try:
        requests.post(depurl, headers=authhead, json=deploymentspec, timeout=1.5)
    except requests.exceptions.ReadTimeout:
        pass

    lexresponse = {
            "dialogAction":
                {
                 "fulfillmentState":"Fulfilled",
                 "type":"Close","message":
                    {
                      "contentType":"PlainText",
                      "content": template+" VM deployment has started. Please wait 45 seconds to 1 minute for deployment to complete."
                    }
                }
            }

    return lexresponse
Example #18
def handler(event, context):
    data = None
    try:
        data = json.loads(event['body'])
    except Exception as ex:
        return respond(ex.args[0], None)
    table_name = os.getenv(
        'EVENTS_TABLE_NAME')  # Table from env vars or todo_test
    region_name = 'us-east-2'
    client = boto3.resource('dynamodb', region_name=region_name)
    date = data['dob'].split("-")
    #api = cHVibGljLTc3NTE6Qi1xYTItMC01ZjAzMWNiZS0wLTMwMmQwMjE1MDA4OTBlZjI2MjI5NjU2M2FjY2QxY2I0YWFiNzkwMzIzZDJmZDU3MGQzMDIxNDUxMGJjZGFjZGFhNGYwM2Y1OTQ3N2VlZjEzZjJhZjVhZDEzZTMwNDQ=
    customer_info = {
        "firstName": data['f_name'],
        "lastName": data['l_name'],
        "email": data['mail'],
        "phone": data['phone'],
    }
    dateOfBirth = {
        "day": int(date[2]),
        "month": int(date[1]),
        "year": int(date[0])
    }
    billing_info = {
        "nickName": data['f_name'] + data['l_name'],
        "street": data['street_1'],
        "street2": data['street_2'],
        "city": data['city'],
        "zip": data['zip'],
        "country": data['country'],
        "state": data["state"]
    }
    j_data = {
        "merchantCustomerId": data['merchantRefNum'],
        "locale": os.getenv('locale'),
        "firstName": customer_info['firstName'],
        "lastName": customer_info['lastName'],
        "dateOfBirth": {
            "year": dateOfBirth['year'],
            "month": dateOfBirth['month'],
            "day": dateOfBirth['day']
        },
        "email": customer_info['email'],
        "phone": customer_info['phone']
    }
    headers = {
        "Content-Type": "application/json",
        "Authorization": "Basic " + os.getenv('private_base64'),
        "Simulator": "EXTERNAL",
        "Access-Control-Allow-Origin": "*"
    }
    body = json.dumps(j_data)
    table = client.Table(table_name)
    try:
        response = table.get_item(Key={'mail_id': j_data['email']})
        table_res = response['Item']
    except:
        res = requests.post(
            "https://api.test.paysafe.com/paymenthub/v1/customers",
            body,
            headers=headers)
        res_json = res.json()
        if res.status_code == 201:
            print(res_json)
            response = table.put_item(
                Item={
                    'mail_id': j_data['email'],
                    'user_name': j_data['merchantCustomerId'],
                    'customer_id': res_json['id']
                })
            print("appending to table\n")
            customer_info['customer_id'] = res_json['id']
        else:

            return respond(None, res_json)
    else:
        customer_id = table_res['customer_id']
        customer_info['customer_id'] = customer_id
    j_data = {
        "merchantRefNum": data['merchantRefNum'],
        "paymentTypes": ["CARD"]
    }
    body = json.dumps(j_data)
    id = customer_info['customer_id']
    url = "https://api.test.paysafe.com/paymenthub/v1/customers/" + id + "/singleusecustomertokens"
    print(url)
    res = requests.post(url, body, headers=headers)
    j_res = res.json()
    print(j_res['singleUseCustomerToken'])
    res = {
        "singleUseCustomerToken": j_res['singleUseCustomerToken'],
        "customer_id": id,
        "environemnt": os.getenv('environment'),
        "currency": os.getenv('currency'),
        "m_description": os.getenv('m_dynamicDescriptor'),
        "m_phone": os.getenv('m_phone'),
        "locale": os.getenv('locale'),
        "config": os.getenv('public_base64')
    }
    return respond(None, res)
Example #19
def lambda_handler(event, context):

    refreshToken = "Bearer eyJjdHkiOiJKV1QiLCJlbmMiOiJBMjU2R0NNIiwiYWxnIjoiUlNBLU9BRVAifQ.O20xoYZxZk0Rjxh8q5axg1PPRHU86zu5hjpnbGpnqKBAjaOnqEJmaYJ1E37ucPBkEasF_WFZ5BAFmt_qLOQvl3AbsSLqQs_QAc0aS5nXJX90ujbUrBFQcmMlOyhZzt9rv8_u-WuKIhO-wYk1JLf8Piznzl7m2ML_BVWNs558-QQoyxBU1HopNouPoK-WNWT-xR73I310KdmGD_28zJ0bRRRJuC0EH0sR3UATMIeiWkVfC3p6IIOwXbBOb3nbS-55KL8u4kokiisybozJVu0e1vT401nwn598zpwGtKGR8RO3RzKhlsEFx11Lfy8vqGqBTu0dWOHFz7snCeEwcAkzJQ.FGmkfRJCsyz5WhJ0.CG0VvasU_cjOtJaxwjPYteEhO1_ST3tMOdevLED3qKo8SpA7iGFSvDrJumYaW6pNZd5o1do99qqPNMr406K_nuTCtKIkzQygxnPkozLIsCsBVqB6Y7In9ba_4Nwy9z29zc4Bt-P58oZrLYMr4UnD47fu_zhKtHyM3UbENoq5lS_bsC3Ja9unKlnzymlv65OMiYF9hHH0G_kkZuypzdE_wt3_7GRFYw8EX6hJrXofoPcEOwr4BG8aEpjqTqKY6zr3BbOEBNQpL6Tk-baxxtIS78g1qyLeHKg-yLHOE_GurAqcE0IL-4LXwt9OwDaM9Ipq5tlgMZJN83WimV6t6UbMXg4W4WVKu7nwmgSexBhIpew8a73sr1vp1rCNZMyW-2__tNE3rxb6uoMJn73UMvi1yOGtAMtFh-mkyJbnssYv3p3lmzNuQD-4erL1z4L8Au6nm0yVqxpqE8epT-kOeYicD3i1DXHt7Zh1KbP9klvDpEXUSR49QpfNWxyssFQgTd9r3v4QNlZ5uZySAkIBnQ8bnE3WaHqQaqvzQ5mq2axTOuO_qIbW9TVKouZYAeEKtOYbTGGSXrPB0-7BqmGPfWZVsGpy6R0vLe3osfzdp27XMiREH8LPTZcuef2X0Y1zD6WxJY7vImYlD50wv472QSDZA5omW-bfadJLEMYc2sOakcAvm2S055I5aAAyPIDTcuNXn0zKfTuozrA466uHwqUfekYLOp5hQNDBHvH1HhEP-aw2Eb4Myngi69Dhw7WuUJNrIQiBFsIpMJo1eyb4LVZUXfh-k6W5sOEFn-bEcITHmcEIFe-D1IdMamtb_r-lqnuldUr9lDPPTDY9-rmcedbG-ZcM3t-jItQovHlX-6ZRmGO04BjML-amViFQYIOkfZGHl7kS_KPB8JfE2KfHWO1_21tleG5lILl0clVpI_nqjlMrAUn2KEpYabYBIDDXjISLqXj7NG3V2rR4_6N43SL5ysMTADO7T6wOQwlU-zm21FIUS46gnEEExeOPCJX7OwtfUShHrtUz2HaWx_WYVc3xh_yCTBz1u2OZN0do9XSNMGYPToOEFf7JJmql7eYiurV___p9XQxMdP7jjU2u54zS_zIjPf17Lq_ETSAet4aU6eUzlWrBUce3m6Ao2eCCcHfOB-le2KE7uAFjAQGDKpZuKel0nBaH1fzF9YCMPhg4qy8_sl-_EG-DlHj4OhX3KW7jWzzCMS1RM2iVt-mVhoTLtOAHriuEX4hldF2C1gltmFCigcl--x_gFTz0veSfjAi6QiUSVGkiV-azxf8.eRInva2zbHT5eQ_AEZTd_w"

    headers = {"Authorization": refreshToken}
    response = requests.get(
        'https://tqud77gtrh.execute-api.us-west-2.amazonaws.com/default/refresh',
        headers=headers)
    json_data = json.loads(response.text)
    headers = {"Authorization": json_data}

    event['params'] = {}
    event['params']['path'] = {}
    event['params']['path']['id'] = event['issuerID']
    event['params']['path']['badge_id'] = event['badgeID']
    event['body-json'] = event['data']

    validRecipients = []
    invalidRecipients = []
    inviteRecipients = []
    index = 0

    # create recipient accounts if nonexistent
    while index < len(event['body-json']):
        name = str(event['body-json'][index]) + str(
            event['body-json'][index + 1])
        email = str(event['body-json'][index + 2])
        validRecipients.append(email.strip())
        data = {"name": name.strip(), "email": email.strip()}
        print("Event data right before post call:")
        print(event)

        print("post data:", data)
        print("head data:", headers)

        url = "https://tqud77gtrh.execute-api.us-west-2.amazonaws.com/default/recipients"
        response = requests.post(url, headers=headers, json=data)
        index += 3

    # issue to recipients
    for recipient in validRecipients:
        url = "https://tqud77gtrh.execute-api.us-west-2.amazonaws.com/default/issuers/" + event[
            'params']['path']['id'] + "/badges/" + event['params']['path'][
                'badge_id'] + "/issue/" + recipient
        response = requests.post(url, headers=headers)
        if response.text[0] == '[':
            inviteRecipients.append({
                'email': recipient,
                'badges': response.json()
            })

    payload = {
        "issuerID": event['issuerID'],
        "inviteRecipients": inviteRecipients
    }
    # call step functions
    try:
        step_client.start_execution(
            stateMachineArn=
            'arn:aws:states:us-west-2:010063476047:stateMachine:InviteEmails',
            input=json.dumps(payload))
    except Exception as e:
        test.put_item({'id': str(e)})
Example #20
def send_msg(chat_id, msg):
    r = requests.post(URL, data={'chat_id': chat_id, 'text': msg})
    print(r.text)
Example #21
def update_slack_topic(channel, proposed_update):
    logger.debug("Entered update_slack_topic() with: {} {}".format(
        channel,
        proposed_update)
    )
    payload = {}
    payload['token'] = boto3.client('ssm').get_parameters(
        Names=[os.environ['SLACK_API_KEY_NAME']],
        WithDecryption=True)['Parameters'][0]['Value']
    payload['channel'] = channel

    # This is tricky to get correct for all the edge cases
    # Because Slack adds a '<mailto:[email protected]|[email protected]>' behind the
    # scenes, we need to match the email address in the first capturing group,
    # then replace the rest of the string with the address
    # None of this is really ideal because we lose the "linking" aspect in the
    # Slack Topic.
    current_full_topic = re.sub(r'<mailto:([a-zA-Z@.]*)(?:[|a-zA-Z@.]*)>',
            r'\1', get_slack_topic(channel))
    # Also handle Slack "Subteams" in the same way as above
    current_full_topic = re.sub(r'<(?:!subteam\^[A-Z0-9|]*)([@A-Za-z-]*)>', r'\1',
            current_full_topic)
    # Also handle Slack Channels in the same way as above
    current_full_topic = re.sub(r'<(?:#[A-Z0-9|]*)([@A-Za-z-]*)>', r'#\1',
            current_full_topic)

    if current_full_topic:
        # This should match every case EXCEPT when onboarding a channel and it
        # already has a '|' in it. Workaround: Fix topic again and it will be
        # correct in the future
        current_full_topic_delimit_count = current_full_topic.count('|')
        c_delimit_count = current_full_topic_delimit_count - 1
        if c_delimit_count < 1:
            c_delimit_count = 1

        # This rsplit is fragile too!
        # The original intent was to preserve a '|' in the schedule name but
        # that means multiple pipes in the topic do not work...
        try:
            first_part = current_full_topic.rsplit('|', c_delimit_count)[0].strip()
            second_part = current_full_topic.replace(first_part + " |", "").strip()
        except IndexError:  # if there is no '|' in the topic
            first_part = "none"
            second_part = current_full_topic
    else:
        first_part = "none"
        second_part = "."  # if there is no topic, just add something

    if proposed_update != first_part:
        # slack limits topic to 250 chars
        topic = "{} | {}".format(proposed_update, second_part)
        if len(topic) > 250:
            topic = topic[0:247] + "..."
        payload['topic'] = topic
        r = requests.post('https://slack.com/api/channels.setTopic', data=payload)
        if r.json().get('error') == "channel_not_found":  # private channel
            r = requests.post('https://slack.com/api/groups.setTopic', data=payload)
        logger.debug(r.json())
    else:
        logger.info("Not updating slack, topic is the same")
        return None
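
To make the topic rewriting above concrete, here is a small worked example of the mailto unwrapping on a sample topic string (sample values only).

import re

topic = "primary: <mailto:oncall@example.com|oncall@example.com> | Ask here first"
plain = re.sub(r'<mailto:([a-zA-Z@.]*)(?:[|a-zA-Z@.]*)>', r'\1', topic)
print(plain)  # primary: oncall@example.com | Ask here first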
Example #22
def slack_api_call(method, payload=None):
    payload = payload or {}
    payload['token'] = os.environ['token']
    response = requests.post('https://slack.com/api/' + method, data=payload).json()
    if not response['ok']:
        raise SlackException(method, response['error'])
    return response
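
A typical call to the helper above might look like the sketch below; the channel and text are placeholders, and SlackException is assumed to be defined alongside the helper.

response = slack_api_call('chat.postMessage', {
    'channel': '#deployments',
    'text': 'Deploy finished',
})
print(response['ts'])  # timestamp of the posted message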
Example #23
def report_cloudwatch_api_response_to_slack(asg_name, instance_id,
                                            asg_event_type,
                                            cloudwatch_response_retry,
                                            cloudwatch_response_code,
                                            cloudwatch_api_action,
                                            cloudwatch_alarm_type):
    status = "Finished"
    color = "good"

    if cloudwatch_api_action == "DELETE":
        color = "#2e75b8"  # blue

    if cloudwatch_response_code != 200:
        status = "Failed"
        color = "danger"

    fallback = "{} to {} Cloudwatch Alarm: {} for {}/{} (ASG / Instance) by {}".format(
        status, cloudwatch_api_action, cloudwatch_alarm_type, asg_name,
        instance_id, asg_event_type)

    slack_text = "{} to {} Cloudwatch Alarm".format(
        status, cloudwatch_api_action.lower())

    # https://api.slack.com/docs/attachments
    attachments = [{
        "text":
        "Details",
        "fallback":
        fallback,
        "color":
        color,
        "fields": [
            {
                "title": "Cloudwatch Action",
                "value": cloudwatch_api_action,
                "short": True
            },
            {
                "title": "Metric",
                "value": cloudwatch_alarm_type,
                "short": True,
            },
            {
                "title": "Auto Scaling Group",
                "value": asg_name,
                "short": True,
            },
            {
                "title": "Instance",
                "value": instance_id,
                "short": True,
            },
        ]
    }]

    if cloudwatch_response_code != 200:
        attachments[0]['fields'].append({
            "title": "Cloudwatch API Response Code",
            "value": cloudwatch_response_code,
            "short": True,
        })
        attachments[0]['fields'].append({
            "title": "Cloudwatch API Retry Count",
            "value": cloudwatch_response_retry,
            "short": True,
        })

    slack_payload = {
        'text': slack_text,
        'channel': slack_channel,
        'username': slack_bot_name_cl,
        'icon_emoji': slack_bot_emoji_cl,
        'attachments': attachments,
    }

    r = requests.post(slack_url, json=slack_payload)
    return r.status_code
Example #24
def test(event, context):
    try:
        snsMessage = json.loads(event['Records'][0]['Sns']['Message'])
        ec2Id = snsMessage['EC2InstanceId']
    except:
        print("Not an SNS Record")
        return 'done'

    r = requests.get(
        'http://' + os.environ['jenkinsUrl'] +
        ':8080/crumbIssuer/api/xml?xpath=concat(//crumbRequestField,":",//crumb)',
        auth=(os.environ['username'], os.environ['password']))
    token = r.text.split(":")
    print(token)

    ec2InstanceInfo = ec2.describe_instances(InstanceIds=[ec2Id])
    ec2InstanceDnsName = ec2InstanceInfo['Reservations'][0]['Instances'][0][
        'PrivateDnsName']
    print(ec2InstanceDnsName)
    print(re.search(r'ip-([0-9]+(-)?)*', ec2InstanceDnsName).group())

    instanceIp = re.search(r'ip-([0-9]+(-)?)*', ec2InstanceDnsName).group()

    test = """
import hudson.FilePath
import hudson.model.Node
import hudson.model.Slave
import jenkins.model.Jenkins
import groovy.time.*
Jenkins jenkins = Jenkins.instance
def jenkinsNodes = jenkins.nodes
for (Node node in jenkinsNodes) {
    // Make sure slave is online
    if (!node.getComputer().isOffline()) {
         if (node.getComputer().countBusy() == 0 && node.getComputer().name.contains("$ip")){
                node.getComputer().setTemporarilyOffline(true,null);
                node.getComputer().doDoDelete();
                println "1"
         }else if (node.getComputer().name.contains("$ip")){
             println "0"
         }
    }else{
      if( node.getComputer().name.contains("$ip")){
         println "1"
      }
    }
}
"""

    test = test.replace("$ip", instanceIp, 3)

    payload = {"script": test}

    finalResult = 0
    counter = 0

    while (True):
        r = requests.post(
            url="http://" + os.environ['jenkinsUrl'] + ":8080/scriptText",
            data=payload,
            auth=(os.environ['username'], os.environ['password']),
            headers={"Jenkins-Crumb": token[1]})
        print(r.content)
        if re.search(r'1', r.text):
            finalResult = 1
            break
        else:
            finalResult = 0
            if (counter == 3):
                break
            counter += 1
            time.sleep(120)

    if (finalResult == 1):
        response = asg.complete_lifecycle_action(
            AutoScalingGroupName=snsMessage['AutoScalingGroupName'],
            LifecycleActionResult='CONTINUE',
            LifecycleActionToken=snsMessage['LifecycleActionToken'],
            LifecycleHookName=snsMessage['LifecycleHookName'],
        )
        print(response)
        message = {"message": "Successfuly shutdown instance: " + instanceIp}
        response = sns.publish(
            TargetArn=event['Records'][0]['Sns']['TopicArn'],
            Message=json.dumps({'default': json.dumps(message)}),
            MessageStructure='json')
        print(response)
    else:
        response = asg.complete_lifecycle_action(
            AutoScalingGroupName=snsMessage['AutoScalingGroupName'],
            LifecycleActionResult='ABANDON',
            LifecycleActionToken=snsMessage['LifecycleActionToken'],
            LifecycleHookName=snsMessage['LifecycleHookName'],
        )
        print(response)
        message = {
            "message":
            "Was not able to shutdown instance: " + instanceIp +
            " please check if starving thread process is running in jenkins"
        }
        response = sns.publish(
            TargetArn=event['Records'][0]['Sns']['TopicArn'],
            Message=json.dumps({'default': json.dumps(message)}),
            MessageStructure='json')
        print(response)

    return "done"
Example #25
def handler(event, context):
    """
    Invokes the main function for each report module
    """
    with open('config.json') as f:
        config = json.load(f)

    reports = config['reports']

    function = os.environ.get('FUNCTION', None)
    env = os.environ.get('ENV', None)

    day = datetime.datetime.now().strftime('%Y%m%d')
    bucket = 'kf-reports-us-east-1-{}-quality-reports'.format(env)
    output = '{}/{}-reports'.format(bucket, day)

    lam = boto3.client('lambda')

    for report in reports:
        report_output = '{}/{}'.format(output,
                                       report['name'].replace(' ', '_'))
        report['output'] = report_output
        response = lam.invoke(
            FunctionName=function,
            InvocationType='Event',
            Payload=str.encode(json.dumps(report)),
        )
        print('invoked report {}'.format(report['name']))
        print('output to {}'.format(report['output']))

    # Send slack message
    if 'SLACK_SECRET' in os.environ and 'SLACK_CHANNEL' in os.environ:
        kms = boto3.client('kms', region_name='us-east-1')
        SLACK_SECRET = os.environ.get('SLACK_SECRET', None)
        SLACK_TOKEN = kms.decrypt(CiphertextBlob=b64decode(SLACK_SECRET)).get(
            'Plaintext', None).decode('utf-8')
        SLACK_CHANNEL = os.environ.get('SLACK_CHANNEL', '').split(',')
        SLACK_CHANNEL = [
            c.replace('#', '').replace('@', '') for c in SLACK_CHANNEL
        ]
        TRACKER_URL = os.environ.get('REPORT_TRACKER', '')

        for channel in SLACK_CHANNEL:
            bucket = output.split('/')[0]
            path = '/'.join(output.split('/')[1:])
            report_url = f"https://s3.amazonaws.com/{bucket}/index.html#{path}/"
            attachments = [{
                "text":
                "{} tasty reports ready for viewing".format(len(reports)),
                "fallback":
                "{} tasty reports ready for viewing".format(len(reports)),
                "callback_id":
                "view_report",
                "color":
                "#3AA3E3",
                "attachment_type":
                "default",
                "actions": [{
                    "name": "overview",
                    "text": "View Now",
                    "type": "button",
                    "url": f'{TRACKER_URL}?url=' + report_url,
                    "style": "primary"
                }]
            }]
            message = {
                'username': '******',
                'icon_emoji': ':bar_chart:',
                'channel': channel,
                'attachments': attachments,
                'text': 'New reports are in hot and fresh :pie:'
            }

            resp = requests.post(
                'https://slack.com/api/chat.postMessage',
                headers={'Authorization': 'Bearer ' + SLACK_TOKEN},
                json=message)
Example #26
def handler(event, context):
    param_name = os.environ['RECIPIENTS_PARAM_NAME']
    sender = os.environ['SENDER'] 
    ssm = boto3.client('ssm')
    parameter = ssm.get_parameter(Name=param_name, WithDecryption=False)
    
    OP_URL = os.environ['OPERATIONS_HOOK_URL']
    RO_URL = os.environ['READONLY_HOOK_URL']
    

    if("Records" in event): 
        
        eventType = event['Records'][0]['EventSource']
        subject = event['Records'][0]['Sns']['Subject']
        timestamp = event['Records'][0]['Sns']['Timestamp']

        # The SNS message body is delivered as a JSON string, so parse it once
        snsMessage = json.loads(event['Records'][0]['Sns']['Message'])

        alarmName = snsMessage['AlarmName']
        alarmDescription = snsMessage['AlarmDescription']
        newStateValue = snsMessage['NewStateValue']
        newStateReason = snsMessage['NewStateReason']
        stateChangeTime = snsMessage['StateChangeTime']

        #trigger
        metricName = snsMessage['Trigger']['MetricName']
        comparisonOperator = snsMessage['Trigger']['ComparisonOperator']
        threshold = snsMessage['Trigger']['Threshold']
        
        
        BODY_TEXT = ("A {} event was received: {}.\n\nTimestamp: {}\n\nAlarmName: {}\nAlarmDescription: {}\nNewStateValue: {}\nNewStateReason: {}\nStateChangeTime: {}\n\nTrigger:\n\tMetricName: {}\n\tComparisonOperator: {}\n\tThreshold: {}".format(eventType, subject, timestamp, alarmName, alarmDescription, newStateValue, newStateReason, stateChangeTime, metricName, comparisonOperator, threshold)) 
    
        responseOP = requests.post(OP_URL,json={'text': BODY_TEXT})
        responseRO = requests.post(RO_URL,json={'text': BODY_TEXT})
    
    else: 
        cw_data = event['awslogs']['data']
        compressed_payload = base64.b64decode(cw_data)
        uncompressed_payload = gzip.decompress(compressed_payload)
        payload = json.loads(uncompressed_payload)
        
        
        logGroup=payload["logGroup"]
        logStream=payload["logStream"]
        id=payload["logEvents"][0]["id"]
        enc_timestamp=payload["logEvents"][0]["timestamp"]
        timestamp=datetime.utcfromtimestamp(enc_timestamp/1000).strftime('%Y-%m-%d %H:%M:%S')
        message=payload["logEvents"][0]["message"]
        BODY_TEXT = ("An ERROR log was added to the following logGroup: {}\n\nlogStream: {}\nid: {}\ntimestamp: {}\nmessage: {}.\n \nPlease follow this link to access the related logGroup: https://eu-central-1.console.aws.amazon.com/cloudwatch/home?region=eu-central-1#logStream:group={}").format(logGroup,logStream,id,timestamp,message,logGroup)
    
    
        # Replace [email protected] with your "From" address.
        # This address must be verified with Amazon SES.
        SENDER = "WIPO PCT Monitoring <"+sender+">"
        
        # Replace [email protected] with a "To" address. If your account 
        # is still in the sandbox, this address must be verified.
        RECIPIENT = parameter['Parameter']['Value']
        RECIPIENT = RECIPIENT.split(",")

        # Specify a configuration set. If you do not want to use a configuration
        # set, comment the following variable, and the 
        # ConfigurationSetName=CONFIGURATION_SET argument below.
        # CONFIGURATION_SET = "ConfigSet"
        
        # If necessary, replace us-west-2 with the AWS Region you're using for Amazon SES.
        AWS_REGION = "eu-central-1"
        
        # The subject line for the email.
        SUBJECT ="Alert: {}".format(payload["subscriptionFilters"][0])
                    
        # The HTML body of the email.
        BODY_HTML = """<html>
         <head></head>
         <body>
           <h1>An ERROR log was added to the following logGroup: {}</h1>
           <br><strong>log:</strong> {}<br>
           <br>logStream: {}<br>
           id: {}<br>
           timestamp: {}<br>
           <br> Please follow this link to access the related <a href=https://eu-central-1.console.aws.amazon.com/cloudwatch/home?region=eu-central-1#logStream:group={}> logGroup</a>.
         </body>
         <footer>
         <p><small>This email was sent with
            <a href='https://aws.amazon.com/ses/'>Amazon SES</a> using the
            <a href='https://aws.amazon.com/sdk-for-python/'> AWS SDK for Python (Boto)</a>.</small></p>
         </footer>
         </html>   
         """.format(logGroup,message,logStream,id,timestamp,logGroup)            
        
        # The character encoding for the email.
        CHARSET = "UTF-8"
        
        # Create a new SES resource and specify a region.
        client = boto3.client('ses',region_name=AWS_REGION)
        
        # Try to send the email.
        try:
            #Provide the contents of the email.
            response = client.send_email(
                Destination={
                    'ToAddresses': 
                        RECIPIENT,
                },
                Message={
                    'Body': {
                        'Html': {
                            'Charset': CHARSET,
                            'Data': BODY_HTML,
                        },
                        'Text': {
                            'Charset': CHARSET,
                            'Data': BODY_TEXT,
                        },
                    },
                    'Subject': {
                        'Charset': CHARSET,
                        'Data': SUBJECT,
                    },
                },
                Source=SENDER,
                # If you are not using a configuration set, comment or delete the
                # following line
                #ConfigurationSetName=CONFIGURATION_SET,
            )
        # Display an error if something goes wrong.	
        except ClientError as e:
            print("ERROR --> ", e.response['Error']['Message'])
        else:
            print("Email sent! Message ID:"),
            print(response['MessageId'])
            
        logGroupLink = "https://eu-central-1.console.aws.amazon.com/cloudwatch/home?region=eu-central-1#logStream:group={}".format(logGroup)    
        
        responseOP = requests.post(OP_URL,json={'text': BODY_TEXT})
        responseRO = requests.post(RO_URL,json={'text': BODY_TEXT})
        
    return "SENT"
Example #27
def lambda_handler(event, context):
    url = 'https://{}.resindevice.io/'.format(os.environ['DEVICE_UUID'])
    return requests.post(url, json=event).json()
Example #28
def lambda_handler(event, context):
    body_str = base64.b64decode(event["body64"])
    logger.info(body_str)
    if not verify_signature(SECRET, event["signature"], body_str):
        raise Exception('[Unauthorized] Authentication error')

    # https://developer.github.com/v3/activity/events/types/#releaseevent
    body = json.loads(body_str)
    if body["action"] != "published":
        return 'Not a "published" event'

    release = body["release"]
    repository = body["repository"]

    AUTHOR = release["author"]["login"]
    TAG_NAME = release["tag_name"]
    HTML_URL = release["html_url"]

    REPO_NAME = repository["name"]
    REPO_FULLNAME = repository["full_name"]
    REPO_URLS = [repository["clone_url"], repository["git_url"], repository["ssh_url"]]
    REPO_HTML_URL = repository["html_url"]

    if REPO_NAME.endswith(".jl"):
        PKG_NAME = REPO_NAME[:-3]
    else:
        errorissue(REPO_FULLNAME, AUTHOR, "The repository does not have a .jl suffix.")

    if not re.match(r"v\d+\.\d+\.\d+$", TAG_NAME):
        errorissue(REPO_FULLNAME, AUTHOR, "The tag name \"" + TAG_NAME + "\" is not of the appropriate SemVer form (vX.Y.Z).")

    VERSION = TAG_NAME[1:]

    # 1) check if package registered
    r = requests.get(urljoin(GITHUB_API, "repos", META_ORG, META_NAME, "contents", PKG_NAME, "url"),
                     auth=(BOT_USER, BOT_PASS),
                     params={"ref": META_BRANCH})

    if r.status_code == 404:
        REGISTER = True

    else:
        REGISTER = False
        rj = r.json()
        # verify this is indeed the package with the correct name
        REPO_URL_META = gh_decode(rj).rstrip()
        if REPO_URL_META not in REPO_URLS:
            errorissue(REPO_FULLNAME, AUTHOR, "The URL of this package does not match that stored in METADATA.jl.")

        # 1a) get last version
        r = requests.get(urljoin(GITHUB_API, "repos", META_ORG, META_NAME, "contents", PKG_NAME, "versions"),
                         auth=(BOT_USER, BOT_PASS),
                         params={"ref": META_BRANCH})
        rj = r.json()
        ALL_VERSIONS = [d["name"] for d in rj]
        PREV_VERSIONS = [v for v in ALL_VERSIONS if semverkey(v) < semverkey(VERSION)]
        if not PREV_VERSIONS:
            errorissue(REPO_FULLNAME, AUTHOR, "Cannot tag a new version \"" + TAG_NAME + "\" preceding all existing versions.")
        LAST_VERSION = max(PREV_VERSIONS, key=semverkey)

        # 1b) get last version sha1
        r = requests.get(urljoin(GITHUB_API, "repos", META_ORG, META_NAME, "contents", PKG_NAME, "versions", LAST_VERSION, "sha1"),
                         auth=(BOT_USER, BOT_PASS),
                         params={"ref": META_BRANCH})
        rj = r.json()
        LAST_SHA1 = gh_decode(rj).rstrip()

        # 1c) get last requires
        # this may not exist in some very old cases
        r = requests.get(urljoin(GITHUB_API, "repos", META_ORG, META_NAME, "contents", PKG_NAME, "versions", LAST_VERSION, "requires"),
                         auth=(BOT_USER, BOT_PASS),
                         params={"ref": META_BRANCH})
        if r.status_code == 200:
            rj = r.json()
            LAST_REQUIRE = gh_decode(rj)
        else:
            LAST_REQUIRE = ""


    # 2) get the commit hash corresponding to the tag
    r = requests.get(urljoin(GITHUB_API, "repos", REPO_FULLNAME, "git/refs/tags", TAG_NAME),
                     auth=(BOT_USER, BOT_PASS))
    rj = r.json()

    # 2a) if annotated tag: need to make another request
    if rj["object"]["type"] == "tag":
        r = requests.get(rj["object"]["url"],
                    auth=(BOT_USER, BOT_PASS))
        rj = r.json()

    SHA1 = rj["object"]["sha"]

    # 3) get the REQUIRE file from the commit
    r = requests.get(urljoin(GITHUB_API, "repos", REPO_FULLNAME, "contents", "REQUIRE"),
                     auth=(BOT_USER, BOT_PASS),
                     params={"ref": SHA1})
    if r.status_code == 404:
        errorissue(REPO_FULLNAME, AUTHOR, "The REQUIRE file could not be found.")

    rj = r.json()
    REQUIRE = gh_decode(rj).replace('\r\n', '\n') # normalize line endings

    # 4) get current METADATA head commit
    r = requests.get(urljoin(GITHUB_API, "repos", META_ORG, META_NAME, "git/refs/heads", META_BRANCH),
                auth=(BOT_USER, BOT_PASS))
    rj = r.json()
    PREV_COMMIT_SHA = rj["object"]["sha"]
    PREV_COMMIT_URL = rj["object"]["url"]

    # 5) get tree corresponding to last METADATA commit
    r = requests.get(PREV_COMMIT_URL,
                auth=(BOT_USER, BOT_PASS))
    rj = r.json()
    PREV_TREE_SHA = rj["tree"]["sha"]

    # 6a) create blob for REQUIRE
    r = requests.post(urljoin(GITHUB_API, "repos", BOT_USER, META_NAME, "git/blobs"),
            auth=(BOT_USER, BOT_PASS),
            json=gh_encode(REQUIRE))
    rj = r.json()
    REQUIRE_BLOB_SHA = rj["sha"]

    # 6b) create blob for SHA1
    r = requests.post(urljoin(GITHUB_API, "repos", BOT_USER, META_NAME, "git/blobs"),
            auth=(BOT_USER, BOT_PASS),
            json=gh_encode(SHA1+"\n"))
    rj = r.json()
    SHA1_BLOB_SHA = rj["sha"]

    # 6c) create blob for url if necessary
    if REGISTER:
        r = requests.post(urljoin(GITHUB_API, "repos", BOT_USER, META_NAME, "git/blobs"),
                auth=(BOT_USER, BOT_PASS),
                json=gh_encode(REPO_URLS[0]+"\n"))
        rj = r.json()
        URL_BLOB_SHA = rj["sha"]


    # 7) create new tree
    tree_data = {
        "base_tree": PREV_TREE_SHA,
        "tree": [
            {
                "path": urljoin(PKG_NAME,"versions",VERSION,"requires"),
                "mode": "100644",
                "type": "blob",
                "sha": REQUIRE_BLOB_SHA
            },
            {
                "path": urljoin(PKG_NAME,"versions",VERSION,"sha1"),
                "mode": "100644",
                "type": "blob",
                "sha": SHA1_BLOB_SHA
            }
        ]
    }

    if REGISTER:
        tree_data["tree"].append({
            "path": urljoin(PKG_NAME,"url"),
            "mode": "100644",
            "type": "blob",
            "sha": URL_BLOB_SHA
        })

    r = requests.post(urljoin(GITHUB_API, "repos", BOT_USER, META_NAME, "git/trees"),
        auth=(BOT_USER, BOT_PASS),
        json=tree_data)
    rj = r.json()
    NEW_TREE_SHA = rj["sha"]

    # 7.5) get user info for commit
    r = requests.get(urljoin(GITHUB_API,"users",AUTHOR),
                auth=(BOT_USER, BOT_PASS))
    rj = r.json()
    AUTHOR_NAME = rj["name"]
    if AUTHOR_NAME is None:
        AUTHOR_NAME = AUTHOR

    AUTHOR_EMAIL = rj["email"]
    if AUTHOR_EMAIL is None:
        # get the email from the last commit by the author
        r = requests.get(urljoin(GITHUB_API, "repos", REPO_FULLNAME, "commits"),
                auth=(BOT_USER, BOT_PASS),
                params={"author": AUTHOR})
        rj = r.json()
        if rj:
            AUTHOR_EMAIL = rj[0]["commit"]["author"]["email"]
        else:
            # otherwise use fallback (may or may not link to the author)
            AUTHOR_EMAIL = AUTHOR + "@users.noreply.github.com"


    # 8) create commit
    if REGISTER:
        msg = "Register " + REPO_NAME + " " + TAG_NAME + " [" + HTML_URL + "]"
    else:
        msg = "Tag " + REPO_NAME + " " + TAG_NAME + " [" + HTML_URL + "]"
    r = requests.post(urljoin(GITHUB_API,"repos", BOT_USER, META_NAME, "git/commits"),
            auth=(BOT_USER, BOT_PASS),
            json={
                "message": msg,
                "parents": [ PREV_COMMIT_SHA ],
                "tree": NEW_TREE_SHA,
                "author": {
                    "name": AUTHOR_NAME,
                    "email": AUTHOR_EMAIL
                },
                "committer": {
                    "name": "AttoBot",
                    "email": "*****@*****.**"
                }
            })
    rj = r.json()
    NEW_COMMIT_SHA = rj["sha"]

    # 9) Create new ref (i.e. branch)
    NEW_BRANCH_NAME = PKG_NAME + "/" + TAG_NAME
    r = requests.post(urljoin(GITHUB_API,"repos", BOT_USER, META_NAME, "git/refs"),
            auth=(BOT_USER, BOT_PASS),
            json={
                "ref": "refs/heads/" + NEW_BRANCH_NAME,
                "sha": NEW_COMMIT_SHA
            })

    if r.status_code == 422:
        EXISTING = True
        # 9a) PR already exists, update the ref instead
        r = requests.patch(urljoin(GITHUB_API,"repos", BOT_USER, META_NAME, "git/refs/heads", NEW_BRANCH_NAME),
                auth=(BOT_USER, BOT_PASS),
                json={
                    "sha": NEW_COMMIT_SHA,
                    "force": True
                })
    else:
        EXISTING = False

    # 10) Get travis link
    # this sometimes fails if the tag has not yet made it to Travis
    TRAVIS_PR_LINE = ""
    r = requests.get(urljoin("https://api.travis-ci.org/","repos",REPO_FULLNAME,"branches",TAG_NAME))
    if r.status_code == requests.codes.ok:
        rj = r.json()
        build_id = str(rj["branch"]["id"])
        if SHA1 == rj["commit"]["sha"]:
            badge_url = urljoin("https://api.travis-ci.org/", REPO_FULLNAME + ".svg?branch=" + TAG_NAME)
            build_url = urljoin("https://travis-ci.org/", REPO_FULLNAME, "builds", build_id)
            TRAVIS_PR_LINE = "Travis: [![Travis Build Status](" + badge_url + ")](" + build_url + ")\n"

    # 11) Create pull request
    if REGISTER:
        title = "Register new package " + REPO_NAME + " " + TAG_NAME
        body = "Repository: [" + REPO_FULLNAME + "](" + REPO_HTML_URL + ")\n" + \
            "Release: [" + TAG_NAME + "](" + HTML_URL + ")\n" + \
            TRAVIS_PR_LINE + \
            "cc: @" + AUTHOR + "\n" + \
            "\n" + TAG_REQ + "\n" + \
            "\n@" + AUTHOR + " This PR will remain open for 24 hours for feedback (which is optional). If you get feedback, please let us know if you are making changes, and we'll merge once you're done."
    else:
        diff_url = urljoin(REPO_HTML_URL, "compare", LAST_SHA1 + "..." + SHA1)

        req_diff = "".join(difflib.unified_diff(
            LAST_REQUIRE.splitlines(True),
            REQUIRE.splitlines(True),
            LAST_VERSION + "/requires",
            VERSION + "/requires"))

        if req_diff == "":
            req_status = "no changes"
        else:
            # Ensure closing ``` is on its own line
            if not req_diff.endswith("\n"):
                req_diff += "\n"
            req_status = "\n```diff\n" + req_diff + "```"

        title = "Tag " + REPO_NAME + " " + TAG_NAME
        body = "Repository: [" + REPO_FULLNAME + "](" + REPO_HTML_URL + ")\n" + \
            "Release: [" + TAG_NAME + "](" + HTML_URL + ")\n" + \
            TRAVIS_PR_LINE + \
            "Diff: [vs v" + LAST_VERSION + "](" + diff_url + ")\n" + \
            "`requires` vs v" + LAST_VERSION + ": " + req_status + "\n" + \
            "cc: @" + AUTHOR + "\n" + \
            "\n" + TAG_REQ

    if EXISTING:
        r = requests.get(urljoin(GITHUB_API, "repos", META_ORG, META_NAME, "pulls"),
                params={
                    "head": BOT_USER + ":" + NEW_BRANCH_NAME,
                    "state": "all"
                })
        rj = r.json()[0] # assume it is the only return value

        r = requests.post(rj["comments_url"],
                auth=(BOT_USER, BOT_PASS),
                json={
                    "body": body,
                })
        rj = r.json()

        return "Comment created: " + rj["url"]

    else:
        r = requests.post(urljoin(GITHUB_API, "repos", META_ORG, META_NAME, "pulls"),
                auth=(BOT_USER, BOT_PASS),
                json={
                    "title": title,
                    "body": body,
                    "head": BOT_USER + ":" + NEW_BRANCH_NAME,
                    "base": META_BRANCH
                })
        rj = r.json()

        return "PR created: " + rj["url"]
Example #29
def updateRemediationPayloadChecks(payload):
    config_file = 'config.ini'
    AUTHURL = 'AuthUrl'
    CLIENT_ID = 'ClientId'
    CLIENT_SECRET = 'ClientSecretKey'
    CONFIG_CREDS_SECTION = 'Credentials'
    CONFIG_URL_SECTION = 'RequestURL'
    UPDATE_REMEDIATION_PAYLOAD_URL = 'UpdateRemediatinPayloadUrl'

    exists = os.path.isfile(config_file)
    if exists:
        print('Config file : ' + config_file + ' found')
    else:
        print('Unable to load configuration, File : ' + config_file +
              ' is missing.')
        raise Exception('Unable to load configuration, config.ini is missing.')

    config = configparser.ConfigParser()
    config.read(config_file)

    auth_request = {}
    auth_headers = {}
    auth_headers_update_status = {}

    authurl = config.get(CONFIG_URL_SECTION, AUTHURL)
    #client_id = config.get(CONFIG_CREDS_SECTION, CLIENT_ID)
    client_id = os.getenv("ClientID", "")
    #client_secret = config.get(CONFIG_CREDS_SECTION, CLIENT_SECRET)
    client_secret = os.getenv("ClientSecretKey", "")

    if client_id == "" or client_secret == "":
        raise Exception(
            "ClientID and/or ClientSecretKey in enviornment variables are not specified."
        )

    updateRemediationPayloadURL = config.get(CONFIG_URL_SECTION,
                                             UPDATE_REMEDIATION_PAYLOAD_URL)

    if client_id == "" or client_secret == "" or authurl == "" or updateRemediationPayloadURL == "":
        raise Exception("One or more values are empty in " + config_file)

    auth_request['client_id'] = client_id
    auth_request['client_secret'] = client_secret
    auth_headers['Content-type'] = 'application/json'
    auth_request_json = json.dumps(auth_request)
    payload_json = json.dumps(payload)
    auth_response = requests.post(authurl,
                                  data=auth_request_json,
                                  headers=auth_headers)
    if auth_response.status_code == 200:
        print("auth token generated successfully, " + "http status code is " +
              str(auth_response.status_code))
        print("auth response json : " + json.dumps(auth_response.json()))
        access_token = auth_response.json()['access_token']
        x_epmp_customer_id = auth_response.json()['x-epmp-customer-id']
        x_epmp_domain_id = auth_response.json()['x-epmp-domain-id']
        print("access_token :: " + access_token)
        print("customer_id :: " + x_epmp_customer_id)
        print("domain_id :: " + x_epmp_domain_id)
        auth_headers_update_status['Authorization'] = access_token
        auth_headers_update_status['x-epmp-customer-id'] = x_epmp_customer_id
        auth_headers_update_status['x-epmp-domain-id'] = x_epmp_domain_id
        auth_response_update_status = requests.post(
            updateRemediationPayloadURL,
            data=payload_json,
            headers=auth_headers_update_status)
        if auth_response_update_status.status_code == 200:
            print("Remediation Payload updated successfully")
            print("Returned Message : " + auth_response_update_status.text)
        else:
            raise Exception(
                "Error while updating remediation Payload, HttpStatusCode : " +
                str(auth_response_update_status.status_code) + ", Details:- " +
                auth_response_update_status.text)
    else:
        raise Exception("Failed to generate auth token, " +
                        "http status code is " +
                        str(auth_response.status_code) + " , " +
                        auth_response.text)
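updateRemediationPayloadChecks reads its endpoints from config.ini using the section and key names defined at the top of the function. A hypothetical sketch of generating such a file with configparser is shown below; the URLs are placeholders, and the Credentials section is effectively ignored because the function takes ClientID/ClientSecretKey from environment variables.

import configparser

config = configparser.ConfigParser()
config['RequestURL'] = {
    'AuthUrl': 'https://auth.example.invalid/oauth/token',
    'UpdateRemediatinPayloadUrl': 'https://api.example.invalid/remediation/update',
}
config['Credentials'] = {
    'ClientId': 'unused-when-ClientID-env-var-is-set',
    'ClientSecretKey': 'unused-when-ClientSecretKey-env-var-is-set',
}
with open('config.ini', 'w') as f:
    config.write(f)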
Example #30
def handler(context, inputs):
    def invite(header,
               id,
               usernames,
               org_role='org_member',
               cloud_assembly=False,
               code_stream=False,
               service_broker=False,
               log_intelligence=False,
               network_insight=False):
        baseurl = 'https://console.cloud.vmware.com'
        uri = f'/csp/gateway/am/api/orgs/{id}/invitations'
        payload = {
            'usernames': usernames,
            'orgRoleName': org_role,
            'serviceRolesDtos': []
        }
        if cloud_assembly:
            payload['serviceRolesDtos'].append({
                'serviceDefinitionLink': ('/csp/gateway/slc/api/definitions'
                                          '/external'
                                          '/Zy924mE3dwn2ASyVZR0Nn7lupeA_'),
                'serviceRoleNames':
                ['automationservice:user', 'automationservice:cloud_admin']
            })

        if code_stream:
            payload['serviceRolesDtos'].append({
                'serviceDefinitionLink': ('/csp/gateway/slc/api/definitions'
                                          '/external'
                                          '/ulvqtN4141beCT2oOnbj-wlkzGg_'),
                'serviceRoleNames': [
                    'CodeStream:administrator', 'CodeStream:viewer',
                    'CodeStream:developer'
                ]
            })

        if service_broker:
            payload['serviceRolesDtos'].append({
                'serviceDefinitionLink': ('/csp/gateway/slc/api/definitions'
                                          '/external'
                                          '/Yw-HyBeQzjCXkL2wQSeGwauJ-mA_'),
                'serviceRoleNames': ['catalog:admin', 'catalog:user']
            })

        if log_intelligence:
            payload['serviceRolesDtos'].append({
                'serviceDefinitionLink': ('/csp/gateway/slc/api/definitions'
                                          '/external'
                                          '/7cJ2ngS_hRCY_bIbWucM4KWQwOo_'),
                'serviceRoleNames':
                ['log-intelligence:admin', 'log-intelligence:user']
            })

        if network_insight:
            payload['serviceRolesDtos'].append({
                'serviceDefinitionLink': ('/csp/gateway/slc/api/definitions'
                                          '/external'
                                          '/9qjoNafDp9XkyyQLcLCKWPsAir0_'),
                'serviceRoleNames': ['vrni:admin', 'vrni:user']
            })
        url = baseurl + uri
        print(url)
        print(payload)
        return requests.post(url, json=payload, headers=header)

    baseUri = inputs['baseUri']
    casToken = inputs['casToken']

    url = baseUri + "/csp/gateway/am/api/auth/api-tokens/authorize?refresh_token=" + casToken
    headers = {
        "Accept": "application/json",
        "Content-Type": "application/json"
    }
    payload = {}

    results = requests.post(url, json=payload, headers=headers)

    print(results.json()["access_token"])
    bearer = "Bearer "
    bearer = bearer + results.json()["access_token"]
    headers = {
        "Accept": "application/json",
        "Content-Type": "application/json",
        "Authorization": bearer,
        'csp-auth-token': results.json()["access_token"]
    }

    results = invite(header=headers,
                     id=inputs['orgId'],
                     usernames=inputs['usernames'],
                     cloud_assembly=True,
                     code_stream=True,
                     service_broker=True)

    print(results)
def lambda_handler(event, context):
    # region, instances, payload_dic and url are assumed to be defined at module level
    ec2 = boto3.client('ec2', region_name=region)
    ec2.stop_instances(InstanceIds=instances)
    payload = json.dumps(payload_dic)
    message = requests.post(url, data=payload)
    print(message)
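The short handler above references region, instances, payload_dic and url without defining them; they are presumably module-level configuration. A hypothetical setup with placeholder values:

import json

import boto3
import requests

region = 'us-east-1'                              # region of the instances to stop
instances = ['i-0123456789abcdef0']               # instance ids to stop
url = 'https://hooks.example.invalid/notify'      # notification endpoint
payload_dic = {'text': 'Instances stopped'}       # body posted after stopping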
Example #32
def polaris_new_user(new_user_email, polaris_username, polaris_password,
                     polaris_url, email_domain):
    """Connect to Polaris and create a new user """
    # New Email to add to Polaris
    new_user_email = new_user_email

    # Connect to Polaris and Get an API Token
    token_header = {
        'Content-Type': 'application/json',
        'Accept': 'application/json'
    }

    token_body = {"username": polaris_username, "password": polaris_password}
    logger.info('Attempting to get Polaris Token')
    token_request = requests.post('https://{}/api/session'.format(polaris_url),
                                  json=token_body,
                                  headers=token_header)

    if token_request.status_code == 200:
        result = token_request.json()
        access_token = result['access_token']
        logger.info('Successfully received the Polaris Token')
    else:
        logger.info('Failed to get Polaris Token')
        raise Exception(
            "Query failed to run by returning code of {}. {}".format(
                token_request.status_code, token_body))

    # Create a New User
    authentication_header = {"authorization": "Bearer {}".format(access_token)}

    # grant user access to the admin role
    query = {
        "operationName":
        "InviteUser",
        "variables": {
            "email": new_user_email,
            "roleIds": "00000000-0000-0000-0000-000000000000"
        },
        "query":
        "mutation InviteUser($email: String!, $roleIds: [String!]!) {createUser(email: $email, roleIds: $roleIds)}"
    }

    logger.info('Creating new user.')
    new_user_request = requests.post(
        'https://{}/api/graphql'.format(polaris_url),
        json=query,
        headers=authentication_header)

    if new_user_request.status_code == 200:
        result = new_user_request.json()
        logger.info('Successfully sent the create API call.')
    else:
        logger.info('The create API call failed.')
        raise Exception(
            "Query failed to run by returning code of {}. {}".format(
                new_user_request.status_code, query))

    if 'errors' in result:
        if result['errors'][0][
                'message'] == "ALREADY_EXISTS: cant create user as conflicts with existing one":
            return 'The email {} already exists or has been previously invited to Polaris. See the Forgot password option at https://{} for access.'.format(
                new_user_email, polaris_url)
        elif result['errors'][0][
                'message'] == "INVALID_ARGUMENT: cant create user as email address is invalid":
            return 'Please only enter a valid email address (ex: /polaris first.last@{}). You entered /polaris {}.'.format(
                email_domain, new_user_email)
        else:
            return result['errors'][0]['message']
    elif 'data' in result:
        return 'Successfully created a new account for {}.'.format(
            new_user_email)
    else:
        return 'An unknown error has occurred.'
Example #33
def lambda_handler(event, context):
    try:
        confirm = event['currentIntent']['slots']['Confirm']
        
        if confirm.lower() == 'yes':
            pi_ip = '123.243.247.182:5000'
            full_api_url = "http://{}/train".format(pi_ip)
            
            # payload is assumed to be defined elsewhere (e.g. at module level); it is not shown in this snippet
            response = requests.post(full_api_url, data=payload)
                
            if response.status_code == 200:
                botResponse = "I have completed learning the new faces. Please start the facial recognition"
                action = {
                    "type": "Close",
                    "fulfillmentState": "Fulfilled",
                    "message": {
                        "contentType": "SSML",
                        "content": "<speak>{}</speak>".format(botResponse)
                        }
                    }
            else:
                raise Exception()
        else:
            botResponse = "For me to recognise you I need to learn what you look like. Please remember to train me later"
            action = {
                "type": "Close",
                "fulfillmentState": "Fulfilled",
                "message": {
                    "contentType": "SSML",
                    "content": "<speak>{}</speak>".format(botResponse)
                }
            }

    except:
        botResponse = "Sorry I could not complete the training. Please try again"
        action = {
            "type": "Close",
            "fulfillmentState": "Fulfilled",
            "message": {
                "contentType": "SSML",
                "content": "<speak>{}</speak>".format(botResponse)
            }
        }
    finally:
        try:
            intent = event['currentIntent']['name']
            transcript = event['inputTranscript']
            dbid = str(uuid.uuid4())
            dynamodb = boto3.resource("dynamodb")
            table = dynamodb.Table('LexHistory')
            table.put_item(
                Item={
                    'UUID': dbid,
                    'intent': intent,
                    'transcript': transcript,
                    'response': botResponse
                    }
                )
            
        except:
            action = {
                "type": "Close",
                "fulfillmentState": "Fulfilled",
                "message": {
                    "contentType": "PlainText",
                    "content": "Error Saving to Database uuid: {} intent: {} transcript: {} response: {}".format(dbid, intent, transcript, botResponse)
                }
            }
        return {
            "dialogAction": action
        }   
Example #34
def lambda_handler(event, context):
    post_headers = {
        "Authorization": "Bearer {}".format(os.environ['FB_API_TOKEN']),
    }
    url = "https://graph.facebook.com/{}/feed".format(
        os.environ['FB_GROUP_ID'], )

    headers = event["headers"]
    body = event["body"]

    github_event = headers.get("X-GitHub-Event")

    computed_signature = hmac.new(
        bytes(os.environ["GITHUB_WEBHOOK_SECRET"], "UTF-8"),
        bytes(body, "UTF-8"), hashlib.sha1).hexdigest()

    sha, signature = headers.get("X-Hub-Signature").split('=')

    if computed_signature != signature:
        print("Invalid signature: ", computed_signature, signature)
        return {"statusCode": 400, "body": "Invalid signature"}

    handled_events = {'pull_request', 'status'}

    if github_event not in handled_events:
        return {"statusCode": 200, "body": "Unsupported event"}

    body = json.loads(body)

    if github_event == 'pull_request':
        allowed_actions = {"opened", "closed", "reopened"}
        action = body.get("action")
        if action not in allowed_actions:
            return {
                "statusCode": 200,
                "body": "Unsupported pull request action"
            }

        if action == "closed" and body.get("merged"):
            action = "merged"

        pr_data = body.get("pull_request")
        repo = body.get("repository")
        sender = body.get("sender")
        msg = "[{}] {} **{}** a pull request\n[**# {} {}**]({})\n{}\n".format(
            repo.get("full_name"),
            sender.get("login"),
            action,
            body.get("number"),
            pr_data.get("title"),
            pr_data.get("html_url"),
            pr_data.get("body"),
        )

    if github_event == 'status':
        allowed_states = {"failure", "error"}
        state = body.get("state")
        if state not in allowed_states:
            return {"statusCode": 200, "body": "Unsupported status action"}
        repo = body.get("name")
        ci_project = body.get("context").split(":")[-1].strip()

        ci_url = body.get("target_url")
        ci_build_number = ci_url.split("/")[-1].split("?")[0]
        commit_data = body.get("commit")
        author_data = commit_data.get("author")
        author = author_data.get("login")

        commit_commit_data = commit_data.get("commit")
        commit_message = commit_commit_data.get("message").replace("\n", " ")
        commit_url = commit_data.get("html_url")
        commit_number = commit_url.split("/")[-1][0:7]
        branch_data = body.get("branches")[0]
        branch_name = branch_data.get("name")
        retry_url = "https://circleci.com/actions/retry/github/{}/{}".format(
            repo, ci_build_number)
        msg = "[{}] **{}**: {}'s circleci build # [{}]({}) ({})\n\nBranch [{}/{}]({}):\n>{}\n\nActions: [Rebuild]({})".format(
            repo,
            state.upper(),
            author,
            ci_build_number,
            ci_url,
            ci_project,
            branch_name,
            commit_number,
            commit_url,
            commit_message,
            retry_url,
        )

    data = {
        'formatting': 'MARKDOWN',
        'message': msg,
    }
    requests.post(url, headers=post_headers, data=data)
    print("Posted to group!")
    return {"statusCode": 200, "body": "Victory!"}
Example #35
    def send_message(self, message: SlackMessage):
        if self.webhook_url:
            slack_message = message.slack_format()
            requests.post(self.webhook_url, json.dumps(slack_message))
        else:
            log.warning("Slack webhook unconfigured. Ignoring effort to submit Slack Notification.")
def lambda_handler(event, context):
    for record in event['Records']:

        # Kinesis data is base64 encoded so decode here
        print(record['kinesis']['data'])
        payload = base64.b64decode(record['kinesis']['data']).decode('utf-8')

        # Split the tab-delimited payload into a list of fields
        paramstring = payload.split("\t")
        print(paramstring)

        # Grab the fields I want
        if paramstring[5] == "struct" and paramstring[53] != "page_view":
            print("entered struct clause")
            event = paramstring[53]
            now = datetime.now(tzoffset('GMT', +1 * 60 * 60)).isoformat()
            now1 = now[0:19]
            now2 = now[26:35]
            ts = now1 + now2
            language_learnt = paramstring[55]
            interface_language = paramstring[54]

            if interface_language == 'enc':
                interface_language = 'en'
            else:
                continue

            platform = paramstring[1]
            uid = paramstring[12]
            params = paramstring[56]
            params = json.loads(params.replace("'", '"'))
            # print(event)
            # print(ts)
            # print(language_learned)
            # print(interface_language)
            # print(platform)
            # print(uid)
            # print(params)

            event_array = {}
            event_array["external_id"] = uid
            event_array["app_id"] = APPBOY_APP_ID
            event_array["name"] = event
            event_array["time"] = ts
            event_array["language_learnt"] = language_learnt
            for param in params:
                event_array[param] = params[param]

            event_array_list = []
            event_array_list.append(event_array)

            attributes = {}
            attributes["external_id"] = uid
            attributes["language"] = interface_language

            attributes_list = []
            attributes_list.append(attributes)

            # Ping to AppBoy
            url = 'https://api.appboy.com/users/track'
            headers = {"Content-Type": "application/json"}
            data = {"app_group_id": APPBOY_APP_GROUP_ID, "attributes": attributes_list, "events": event_array_list}

            data_to_app = json.dumps(data)

            r = requests.post(url, data=data_to_app, headers=headers)

            if r.status_code == 200:
                print("Sent Successfully these parameters - %s" % data_to_app)
            else:
                print("Problem with the request \nsent %s and returned this error\n" % data_to_app)
                print(r.content)

        else:
            continue
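For local testing of the Kinesis handler above, the record payload needs at least 57 tab-separated fields with the specific indices the handler reads. A hypothetical event builder (field values are made up):

import base64
import json

fields = [''] * 57
fields[1] = 'mob'                        # platform
fields[5] = 'struct'                     # event type gate
fields[12] = 'user-123'                  # uid
fields[53] = 'lesson_completed'          # event name (anything but 'page_view')
fields[54] = 'enc'                       # interface language, mapped to 'en'
fields[55] = 'es'                        # language learnt
fields[56] = json.dumps({'score': 10})   # event params
payload = '\t'.join(fields)
event = {'Records': [{'kinesis': {'data': base64.b64encode(payload.encode()).decode()}}]}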
Example #37
def AcceptGrantHandler(directive):
    logger.info('Handling AcceptGrant')

    d = pyASH.Request(directive)

    # Retrieve tokens using Grant code
    lwa_client_id = os.environ['lwa_client_id']
    lwa_client_secret = os.environ['lwa_client_secret']

    payload = {
        'grant_type':'authorization_code',
        'code': d.code,
        'client_id':lwa_client_id,
        'client_secret':lwa_client_secret,
    }

    r = requests.post("https://api.amazon.com/auth/o2/token", data=payload)

    if r.status_code != 200:
        errmsg = 'Unable to receive access/refresh tokens.  Return code was ' + str(r.status_code)
        logger.warn(errmsg)
        return pyASH.ErrorResponse('Alexa', 'ACCEPT_GRANT_FAILED', errmsg, correlationToken=d.correlationToken)

    try:
        response = r.json()
        gateway_access_token = response['access_token']
        gateway_refresh_token = response['refresh_token']
    except KeyError:
        errmsg = 'Tokens not in response'
        logger.warn(errmsg)
        return pyASH.ErrorResponse('Alexa', 'ACCEPT_GRANT_FAILED', errmsg, correlationToken=d.correlationToken)

    try:
        response = r.json()
        gateway_token_duration = response['expires_in']
    except KeyError:
        gateway_token_duration = 900 # Set default token duration to 15 minutes

    # Retrieve the user's profile
    try:
        # grantee_access_token is assumed to be provided elsewhere (e.g. extracted from the directive); it is not defined in this snippet
        user_data = get_user_profile(grantee_access_token)
    except requests.exceptions.HTTPError as err:
        errmsg = 'Unable to retrieve profile.  Error was ' + str(err)
        logger.warn(errmsg)
        return pyASH.ErrorResponse('Alexa', 'ACCEPT_GRANT_FAILED', errmsg, correlationToken=d.correlationToken)

    if user_data['user_id'] == '':
        errmsg = 'Requested profile but user_id not received'
        logger.warn(errmsg)
        return pyASH.ErrorResponse('Alexa', 'ACCEPT_GRANT_FAILED', errmsg, correlationToken=d.correlationToken)


    dynamodb_client = boto3.resource('dynamodb')
    key = { 'customer': user_data['user_id'] }

    logger.info( 'handle_authorization: profile received with values user_id [' + user_data['user_id'] + '] user_email [' + user_data['email'] + '] user_name [' + user_data['name'] + ']' )

    # Store tokens to database
    table = dynamodb_client.Table('preampskill_customers')
    response = table.update_item (
        Key = key,
        UpdateExpression = "set user_name = :n, email = :e, access_token = :a, refresh_token = :r, expires_in = :i",
        ExpressionAttributeValues =  {
            ':n': user_data['name'],
            ':e': user_data['email'],
            ':a': gateway_access_token,
            ':r': gateway_refresh_token,
            ':i': gateway_token_duration
        },
        ReturnValues="UPDATED_NEW"
    )

    # Store
    return pyASH.Response(directive)
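get_user_profile is not shown in the snippet above. A minimal sketch, assuming it queries the Login with Amazon profile endpoint with the grantee's access token and returns the fields the handler reads (user_id, email, name):

import requests

def get_user_profile(access_token):
    r = requests.get('https://api.amazon.com/user/profile',
                     headers={'Authorization': 'Bearer ' + access_token})
    r.raise_for_status()   # the caller catches requests.exceptions.HTTPError
    profile = r.json()
    return {'user_id': profile.get('user_id', ''),
            'email': profile.get('email', ''),
            'name': profile.get('name', '')}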
Example #38
    def setUp(self):
        self.resp = requests.post(
            '%s/post' % HTTPBIN_ORIGIN,
            data={'username': '******', 'password': '******'},
            headers={'X-File': 'requests'})
Example #39
                                 region_name=region_name)
except Exception as e:
    print(e)

try:
    dynamodb = boto3.resource('dynamodb')
    table = dynamodb.Table(intezer_analysis_url_2_sha256_table)
except Exception as E:
    print(E)


def return_code(status, body):
    return {"statusCode": status, "body": json.dumps(body)}


response = requests.post(intezer_api_base_url + '/get-access-token',
                         json={'api_key': intezer_api_key})
response.raise_for_status()
session = requests.session()
session.headers['Authorization'] = 'Bearer %s' % response.json()['result']


def lambda_handler(event, context):
    try:
        if event['pathParameters'] and event['pathParameters']['proxy']:
            sha256 = event['pathParameters']['proxy']
            fileObject = getFileObject(Bucket=malscanbot_s3_bucket_name,
                                       Key=sha256)
            if fileObject['code'] == 'SUCCESS':
                files = {'file': fileObject['content']}
                response = session.post(intezer_api_base_url + '/analyze',
                                        files=files)
Example #40
def microsoft_authenticate(username, password, useragent):
    """
    Attempts to authenticate to the Microsoft login portal
    at login.live.com. If successful, the "success" key is
    set to True. Otherwise the login failed.

    Params:
        (str)username  - Username to authenticate as.
        (str)password  - Password to authenticate with.
        (str)useragent - User agent string to pass during
                         authentication request.

    Returns:
        dict - Dictionary with keys:
            timestamp
            username
            password
            success
            change
            2fa_enabled
            type
            code
            name
            action
            headers
            cookies
    """
    tokens = fetch_session(useragent)
    ts = datetime.datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S')
    data_response = {
        'timestamp': ts,
        'username': username,
        'password': password,
        'success': False,
        'change': False,
        '2fa_enabled': False,
        'type': None,
        'code': None,
        'name': None,
        'action': None,
        'headers': [],
        'cookies': [],
    }
    headers = {}
    headers["Host"] = "login.live.com"
    headers["Connection"] = "close"
    headers["Cache-Control"] = "max-age=0"
    headers["Origin"] = "https://login.live.com"
    headers["Upgrade-Insecure-Requests"] = "1"
    headers["Content-Type"] = "application/x-www-form-urlencoded"
    headers["User-Agent"] = useragent
    headers[
        "Accept"] = "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8"
    headers["Accept-Encoding"] = "gzip, deflate"
    headers["Accept-Language"] = "en-US,en;q=0.9"
    headers["Cookie"] = "MSPOK={};".format(tokens["mspok"])

    payload = {
        "i13": "0",
        "login": username,
        "loginfmt": username,
        "type": "11",
        "LoginOptions": "3",
        "lrt": "",
        "lrtPartition": "",
        "hisRegion": "",
        "hisScaleUnit": "",
        "passwd": password,
        "ps": "2",
        "psRNGCDefaultType": "",
        "psRNGCEntropy": "",
        "psRNGCSLK": "",
        "canary": "",
        "ctx": "",
        "hpgrequestid": "",
        "PPFT": tokens["flow_token"],
        "PPSX": "Passport",
        "NewUser": "******",
        "FoundMSAs": "",
        "fspost": "0",
        "i21": "0",
        "CookieDisclosure": "0",
        "IsFidoSupported": "1",
        "i2": "1",
        "i17": "0",
        "i18":
        "__ConvergedLoginPaginatedStrings%7C1%2C__ConvergedLogin_PCore%7C1%2C",
        "i19": "26144"
    }
    url = "https://login.live.com/ppsecure/post.srf"

    try:
        resp = requests.post(url,
                             data=payload,
                             headers=headers,
                             allow_redirects=False)
        if resp.status_code == 302:
            data_response["success"] = True

    except Exception as e:
        data_response["error"] = e

    return data_response
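A hypothetical call to the function above; a 302 redirect from ppsecure/post.srf is what flips success to True:

result = microsoft_authenticate(
    username='user@example.com',
    password='example-password',
    useragent='Mozilla/5.0 (Windows NT 10.0; Win64; x64)')
print(result['timestamp'], result['success'])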
Example #41
def lambda_handler(event, context):
    logger = logging.getLogger()
    logger.setLevel(logging.INFO)
    isTest = None
    try:
        isTest = event["isTest"]
    except KeyError:
        logger.info("isTest:{}".format(isTest))

    if isTest:
        logger.addHandler(logging.StreamHandler())

    # build datetime: batch job for yesterday
    dtKst = get_datetime_kst_now()
    ts = dtKst.timestamp()
    dKst = dtKst.strftime('%Y-%m-%d')
    tKst = dtKst.strftime('%H:%M')

    logger.info("dtKst:{} dKst:{} tKst: {} timestamp:{}".format(
        dtKst, dKst, tKst, ts))

    # get currency from coinone
    # r1 = requests.get(api_coinone_currency)
    # currency = r1.json()
    # if currency["result"] != "success":
    #     raise Exception("invalid response in coinone currency api. errorcode is " + currency["errorCode"])
    # usd_to_krw = currency['currency']

    # get tables
    dynamodb = boto3.resource('dynamodb')
    table_coinone_ticker = dynamodb.Table('coinone_ticker')
    table_coinmarketcap_ticker = dynamodb.Table('coinmarketcap_ticker')

    # get tickers for coins in coinone_ticker table
    coinone_ticker = {}
    for c in coinone_coins:
        result = table_coinone_ticker.query(
            KeyConditionExpression=Key('coin').eq(c),
            ScanIndexForward=False,
            Limit=1,
        )

        if len(result["Items"]) > 0:
            coinone_ticker[c] = result["Items"][0]

        logger.info("{} coinone items: {}".format(c, result["Items"]))

    # get tickers for coins in coinmarketcap_table
    coinmarketcap_ticker = {}
    for c in coinone_coins:
        result = table_coinmarketcap_ticker.query(
            KeyConditionExpression=Key('coin').eq(c),
            ScanIndexForward=False,
            Limit=1,
        )

        if len(result["Items"]) > 0:
            coinmarketcap_ticker[c] = result["Items"][0]

        logger.info("{} coinmarketcap items: {}".format(c, result["Items"]))

    slack_text = "[{} KST]\n".format(tKst)
    for c in coinone_coins:
        if c not in coinone_ticker or c not in coinmarketcap_ticker:
            continue

        coinone_price = Decimal(coinone_ticker[c]["price_krw"])
        coinmarketcap_price = Decimal(coinmarketcap_ticker[c]["price_krw"])
        price_diff_percent = coinone_price / coinmarketcap_price * 100 - 100
        price_diff_percent = round(price_diff_percent, 2)
        logger.info("{} {}".format(coinone_price, coinmarketcap_price))

        if price_diff_percent > 0:
            price_diff_percent = "+{}".format(price_diff_percent)
        slack_text += "{} : *{}%* (co: {:,}, cmkc: {:,})\n".format(
            c, price_diff_percent, coinone_price,
            round(coinmarketcap_price, 0))

    logger.info(slack_text)

    if isTest is True:
        return

    response = requests.post(webhook_url,
                             data=json.dumps({
                                 "text": slack_text,
                             }),
                             headers={'Content-Type': 'application/json'})
    if response.status_code != 200:
        logger.error("Failed to send slack message: {}".format(slack_text))
        logger.error("Status Code: {}".format(response.status_code))
        raise ValueError(
            'Request to slack returned an error %s, the response is:\n%s' %
            (response.status_code, response.text))
Example #42
def lambda_handler(event, context):

    sqs = boto3.client('sqs')

    queue_url = 'https://sqs.us-east-1.amazonaws.com/313852684660/restaurant_order'

    # Receive message from SQS queue
    response = sqs.receive_message(QueueUrl=queue_url,
                                   AttributeNames=['SentTimestamp'],
                                   MaxNumberOfMessages=1,
                                   MessageAttributeNames=['All'],
                                   VisibilityTimeout=0,
                                   WaitTimeSeconds=0)

    if ('Messages' not in response):
        range_loop = 0
    else:
        range_loop = len(response['Messages'])

    #print(range_loop)
    for i in range(range_loop):
        message = response['Messages'][i]
        receipt_handle = message['ReceiptHandle']

        # # Delete received message from queue
        del_response = sqs.delete_message(QueueUrl=queue_url,
                                          ReceiptHandle=receipt_handle)

        message = message['MessageAttributes']
        location = message['Location']['StringValue']
        party_people = message['PeopleNum']['StringValue']
        cuisine = message['Categories']['StringValue']
        timestamp = message['DiningTime']['StringValue']
        phone_number = message['Phone_number']['StringValue']
        Dining_Date = message['Dining_Date']['StringValue']
        print(cuisine)

        businessIds = []
        #cuisine = 'indian'
        url = 'https://search-restaurant-jb43kkwjs42wmaiw54ythrodfu.us-east-1.es.amazonaws.com/restaurant/Restaurant/_search?q=' + cuisine
        r = requests.post(url)
        x = r.json()
        # print(x)

        for items in x['hits']['hits']:
            #print(items['_source']['RestaurantID'])

            #print(x['hits']['hits'][0]['_source']['RestaurantID'])
            dynamodb = boto3.resource('dynamodb', region_name='us-east-1')

            table = dynamodb.Table('yelp-restaurants')
            #businessIds = []
            businessIds.append(items['_source']['RestaurantID'])

        output = getDynemoDbData(table, businessIds)
        print(output)

        sns_client = boto3.client('sns')
        sns_client.publish(PhoneNumber=phone_number,
                           Message="Hi, your results for the request for " +
                           cuisine + " are as follows: " + output)

    return {
        'statusCode': 200,
        'body': json.dumps('Hello from Lambda! Project successful')
    }
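getDynemoDbData is referenced but not defined above. A sketch of what it might look like, assuming the yelp-restaurants table is keyed on RestaurantID and stores Name and Address attributes (all of which are assumptions):

def getDynemoDbData(table, businessIds):
    lines = []
    for business_id in businessIds[:5]:
        # look up each restaurant id and build a short text summary for the SMS sent above
        item = table.get_item(Key={'RestaurantID': business_id}).get('Item')
        if item:
            lines.append('{} at {}'.format(item.get('Name', business_id),
                                           item.get('Address', 'address unknown')))
    return '; '.join(lines)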
Example #43
    def invite(header,
               id,
               usernames,
               org_role='org_member',
               cloud_assembly=False,
               code_stream=False,
               service_broker=False,
               log_intelligence=False,
               network_insight=False):
        baseurl = 'https://console.cloud.vmware.com'
        uri = f'/csp/gateway/am/api/orgs/{id}/invitations'
        payload = {
            'usernames': usernames,
            'orgRoleName': org_role,
            'serviceRolesDtos': []
        }
        if cloud_assembly:
            payload['serviceRolesDtos'].append({
                'serviceDefinitionLink': ('/csp/gateway/slc/api/definitions'
                                          '/external'
                                          '/Zy924mE3dwn2ASyVZR0Nn7lupeA_'),
                'serviceRoleNames':
                ['automationservice:user', 'automationservice:cloud_admin']
            })

        if code_stream:
            payload['serviceRolesDtos'].append({
                'serviceDefinitionLink': ('/csp/gateway/slc/api/definitions'
                                          '/external'
                                          '/ulvqtN4141beCT2oOnbj-wlkzGg_'),
                'serviceRoleNames': [
                    'CodeStream:administrator', 'CodeStream:viewer',
                    'CodeStream:developer'
                ]
            })

        if service_broker:
            payload['serviceRolesDtos'].append({
                'serviceDefinitionLink': ('/csp/gateway/slc/api/definitions'
                                          '/external'
                                          '/Yw-HyBeQzjCXkL2wQSeGwauJ-mA_'),
                'serviceRoleNames': ['catalog:admin', 'catalog:user']
            })

        if log_intelligence:
            payload['serviceRolesDtos'].append({
                'serviceDefinitionLink': ('/csp/gateway/slc/api/definitions'
                                          '/external'
                                          '/7cJ2ngS_hRCY_bIbWucM4KWQwOo_'),
                'serviceRoleNames':
                ['log-intelligence:admin', 'log-intelligence:user']
            })

        if network_insight:
            payload['serviceRolesDtos'].append({
                'serviceDefinitionLink': ('/csp/gateway/slc/api/definitions'
                                          '/external'
                                          '/9qjoNafDp9XkyyQLcLCKWPsAir0_'),
                'serviceRoleNames': ['vrni:admin', 'vrni:user']
            })
        url = baseurl + uri
        print(url)
        print(payload)
        return requests.post(url, json=payload, headers=header)
Example #44
def lambda_handler(event, context):
    OAUTH_token = event['context']['git-token']
    OutputBucket = event['context']['output-bucket']
    temp_archive = '/tmp/archive.zip'

    # Identify git host flavour
    hostflavour = 'generic'
    if 'X-Hub-Signature' in event['params']['header'].keys():
        hostflavour = 'githubent'
    elif 'X-Gitlab-Event' in event['params']['header'].keys():
        hostflavour = 'gitlab'
    elif 'User-Agent' in event['params']['header'].keys():
        if event['params']['header']['User-Agent'].startswith(
                'Bitbucket-Webhooks'):
            hostflavour = 'bitbucket'

    headers = {}
    if hostflavour == 'githubent':
        archive_url = event['body-json']['repository']['archive_url']
        owner = event['body-json']['repository']['owner']['name']
        name = event['body-json']['repository']['name']
        branch = event['body-json']['ref'].replace('refs/heads/', '')
        # replace the code archive download and branch reference placeholders
        archive_url = archive_url.replace('{archive_format}',
                                          'zipball').replace(
                                              '{/ref}', '/master')
        # add access token information to archive url
        archive_url = archive_url + '?access_token=' + OAUTH_token
    elif hostflavour == 'gitlab':
        # https://gitlab.com/jaymcconnell/gitlab-test-30/repository/archive.zip?ref=master
        archive_url = event['body-json']['project']['http_url'].replace(
            '.git', '/repository/archive.zip?ref=master'
        ) + '&private_token=' + OAUTH_token
        owner = event['body-json']['project']['namespace']
        name = event['body-json']['project']['name']
        ## TODO support branch
        #branch =
    elif hostflavour == 'bitbucket':
        archive_url = event['body-json']['repository']['links']['html'][
            'href'] + '/get/master.zip'
        owner = event['body-json']['repository']['owner']['username']
        name = event['body-json']['repository']['name']
        ## TODO support branch
        #branch =
        r = requests.post('https://bitbucket.org/site/oauth2/access_token',
                          data={'grant_type': 'client_credentials'},
                          auth=(event['context']['oauth-key'],
                                event['context']['oauth-secret']))
        if 'error' in r.json().keys():
            logger.error('Could not get OAuth token. %s: %s' %
                         (r.json()['error'], r.json()['error_description']))
            raise Exception('Failed to get OAuth token')
        headers['Authorization'] = 'Bearer ' + r.json()['access_token']

    s3_archive_file = "%s/%s/%s_%s_%s.zip" % (owner, name, owner, name, branch)
    # download the code archive via archive url
    logger.info('Downloading archive from %s' % archive_url)
    r = requests.get(archive_url, verify=verify, headers=headers)

    os.chdir('/tmp')
    z = zipfile.ZipFile(StringIO.StringIO(r.content))
    z.extractall()
    z_rootdir = z.namelist()[0]
    z.close()
    os.chdir(z_rootdir)
    z_new = zipfile.ZipFile(temp_archive, 'w')
    for dirname, subdirs, files in os.walk('.'):
        z_new.write(dirname)
        for filename in files:
            z_new.write(os.path.join(dirname, filename))
    z_new.close()

    # upload the archive to s3 bucket
    logger.info("Uploading zip to S3://%s/%s" %
                (OutputBucket, s3_archive_file))
    s3_client.upload_file(temp_archive, OutputBucket, s3_archive_file)
    logger.info('Upload Complete')
def lambda_handler(event, context):

    params = None
    logger.info('Event %s', event)
    OAUTH_token = event['context']['git-token']
    OutputBucket = event['context']['output-bucket']
    # temp_archive = '/tmp/archive.zip'
    # Identify git host flavour
    hostflavour = 'generic'
    if 'X-Hub-Signature' in event['params']['header'].keys():
        hostflavour = 'githubent'
    elif 'X-Gitlab-Event' in event['params']['header'].keys():
        hostflavour = 'gitlab'
    elif 'User-Agent' in event['params']['header'].keys():
        if event['params']['header']['User-Agent'].startswith(
                'Bitbucket-Webhooks'):
            hostflavour = 'bitbucket'
        elif event['params']['header']['User-Agent'].startswith(
                'GitHub-Hookshot'):
            hostflavour = 'github'
    elif event['body-json']['publisherId'] == 'tfs':
        hostflavour = 'tfs'

    headers = {}
    branch = 'master'
    if hostflavour == 'githubent':
        archive_url = event['body-json']['repository']['archive_url']
        owner = event['body-json']['repository']['owner']['name']
        name = event['body-json']['repository']['name']
        # replace the code archive download and branch reference placeholders
        archive_url = archive_url.replace('{archive_format}',
                                          'zipball').replace(
                                              '{/ref}', '/master')
        # add access token information to archive url
        archive_url = archive_url + '?access_token=' + OAUTH_token
    elif hostflavour == 'github':
        archive_url = event['body-json']['repository']['archive_url']
        owner = event['body-json']['repository']['owner']['login']
        name = event['body-json']['repository']['name']
        # replace the code archive download and branch reference placeholders
        branch_name = event['body-json']['ref'].replace('refs/heads/', '')
        archive_url = archive_url.replace('{archive_format}',
                                          'zipball').replace(
                                              '{/ref}', '/' + branch_name)
        # add access token information to archive url
        archive_url = archive_url + '?access_token=' + OAUTH_token
    elif hostflavour == 'gitlab':
        #https://gitlab.com/jaymcconnell/gitlab-test-30/repository/archive.zip?ref=master
        # note: str.strip('.git') removes those characters from both ends rather than the literal ".git" suffix; archive_root is not used below
        archive_root = event['body-json']['project']['http_url'].strip('.git')
        project_id = event['body-json']['project_id']
        branch = event['body-json']['ref'].replace('refs/heads/', '')
        archive_url = "https://gitlab.com/api/v4/projects/{}/repository/archive.zip".format(
            project_id)
        params = {'private_token': OAUTH_token, 'sha': branch}

        owner = event['body-json']['project']['namespace']
        name = event['body-json']['project']['name']

    elif hostflavour == 'bitbucket':
        branch = event['body-json']['push']['changes'][0]['new']['name']
        archive_url = event['body-json']['repository']['links']['html'][
            'href'] + '/get/' + branch + '.zip'
        owner = event['body-json']['repository']['owner']['username']
        name = event['body-json']['repository']['name']
        r = requests.post('https://bitbucket.org/site/oauth2/access_token',
                          data={'grant_type': 'client_credentials'},
                          auth=(event['context']['oauth-key'],
                                event['context']['oauth-secret']))
        if 'error' in r.json().keys():
            logger.error('Could not get OAuth token. %s: %s' %
                         (r.json()['error'], r.json()['error_description']))
            raise Exception('Failed to get OAuth token')
        headers['Authorization'] = 'Bearer ' + r.json()['access_token']
    elif hostflavour == 'tfs':
        archive_url = event['body-json']['resourceContainers']['account'][
            'baseUrl'] + 'DefaultCollection/' + event['body-json'][
                'resourceContainers']['project'][
                    'id'] + '/_apis/git/repositories/' + event['body-json'][
                        'resource']['repository']['id'] + '/items'
        owner = event['body-json']['resource']['pushedBy']['displayName']
        name = event['body-json']['resource']['repository']['name']
        pat_in_base64 = base64.encodestring(':%s' %
                                            event['context']['git-token'])
        headers['Authorization'] = 'Basic %s' % pat_in_base64
        headers['Authorization'] = headers['Authorization'].replace('\n', '')
        headers['Accept'] = 'application/zip'

    s3_archive_file = "%s/%s/%s/%s.zip" % (owner, name, branch, name)
    # download the code archive via archive url
    logger.info('Downloading archive from %s' % archive_url)
    r = requests.get(archive_url,
                     verify=verify,
                     headers=headers,
                     params=params)
    f = StringIO(r.content)
    zip = ZipFile(f)
    path = '/tmp/code'
    zipped_code = '/tmp/zipped_code'
    try:
        shutil.rmtree(path)
        os.remove(zipped_code + '.zip')
    except:
        pass
    finally:
        os.makedirs(path)
    # Write to /tmp dir without any common prefixes
    zip.extractall(path, get_members(zip))

    # Create zip from /tmp dir without any common prefixes
    shutil.make_archive(zipped_code, 'zip', path)
    logger.info("Uploading zip to S3://%s/%s" %
                (OutputBucket, s3_archive_file))
    s3_client.upload_file(zipped_code + '.zip', OutputBucket, s3_archive_file)
    logger.info('Upload Complete')
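The second handler above extracts the archive through get_members(zip), which is not shown. A sketch of such a helper, assuming the hosted-git archives wrap everything in a single top-level directory that should be stripped before re-zipping:

import os

def get_members(zip_file):
    names = zip_file.namelist()
    prefix = os.path.commonprefix(names)
    prefix = prefix[:prefix.rfind('/') + 1]   # cut back to a whole directory
    for member in zip_file.infolist():
        stripped = member.filename[len(prefix):]
        if stripped:
            member.filename = stripped
            yield member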
Example #46
def lambda_handler(event, context):

    # Read SQS event
    sqs_event(event)

    # Confirm user and group are allowed to be Elevated.
    group_on_queue = event['Records'][0]['messageAttributes']['Group']['stringValue']
    user_on_queue = event['Records'][0]['messageAttributes']['User']['stringValue']
    ad_user_on_queue = allowed_users[f'{user_on_queue}']

    if group_on_queue in allowed_groups and user_on_queue in allowed_users.keys():
        print('User and group allowed to continue')

        # Scan DynamoDB for current Elevated users before adding users (stops spam Elevating)
        print('scanning dynamodb table for current elevated users...')
        dbresponse = table.scan()
        items = dbresponse['Items']
        
        if len(items) > 0:
            
            current_users = []
            current_groups = []
            current_revoke = []
            
            for i in items:
                current_users.append(i['User'])
                current_groups.append(i['ADgroup'])
                current_revoke.append(i['RevokeAt'])
        
            # Check user isn't already elevated.
            if group_on_queue in current_groups and ad_user_on_queue in current_users:
                print('skipping as user already in group with time to spare...')
                response = requests.post(webhook, data=json.dumps({'text': ad_user_on_queue + ' is already elevated in ' + group_on_queue + ' ....' }))

            else:
                # User not in table, adding...
                print('adding user to group....')

                try:
                    print('Trying to add user to AD group...')
                    add_user_from_adgroup(ldap_server, ldap_user, ldap_password, ad_user_on_queue, group_on_queue)
                    response = requests.post(webhook, data=json.dumps({'text': ad_user_on_queue + ' elevated into ' + group_on_queue + ' ....' }))

                    try:
                        print('trying to add user to dynamodb...')
                        update_dynamodb(event)

                    except Exception as error:
                        print('Failed to update DynamoDB Table....')
                        print(error)
                        response = requests.post(webhook, data=json.dumps({'text': f'{error}' }))

                except Exception as error:
                    print('Failed to Add user to AD Group....')
                    print(error)
                    response = requests.post(webhook, data=json.dumps({'text': f'{error}' }))   
                    
                    
        else:
            # Table empty, adding user...
            print('DynamoDB Table is empty, elevate new user.')
            try:
                print('Trying to add user to AD group...')
                add_user_from_adgroup(ldap_server, ldap_user, ldap_password, ad_user_on_queue, group_on_queue)
                response = requests.post(webhook, data=json.dumps({'text': ad_user_on_queue + ' elevated into ' + group_on_queue + ' ....' }))

                try:
                    print('trying to add user to dynamodb...')
                    update_dynamodb(event)

                except Exception as error:
                    print('Failed to update DynamoDB Table....')
                    print(error)
                    response = requests.post(webhook, data=json.dumps({'text': f'{error}' }))

            except Exception as error:
                print('Failed to Add user to AD Group....')
                print(error)
                response = requests.post(webhook, data=json.dumps({'text': f'{error}' }))
                
    else:
        # User or Group not on the list baby!
        print('user or group not allowed to elevate')
        response = requests.post(webhook, data=json.dumps({'text': '*Failed to Elevate* ' + ad_user_on_queue + ' from: ' + group_on_queue + ' ....User or group not in allow list.' }))
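add_user_from_adgroup, the LDAP helper used above, is not included in the snippet. One possible implementation with the ldap3 package, assuming ldap_user is a DOMAIN\user value and that the user and group are passed as full distinguished names (both assumptions):

from ldap3 import Connection, NTLM, Server
from ldap3.extend.microsoft.addMembersToGroups import ad_add_members_to_groups

def add_user_from_adgroup(ldap_server, ldap_user, ldap_password, user_dn, group_dn):
    # bind to the directory and add the user DN to the group DN
    server = Server(ldap_server, use_ssl=True)
    conn = Connection(server, user=ldap_user, password=ldap_password,
                      authentication=NTLM, auto_bind=True)
    ad_add_members_to_groups(conn, [user_dn], [group_dn])
    conn.unbind()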