Example No. 1
import os

import requests
from lxml import html


def lambda_handler(event, context):
    '''
    Scrape a Strava club's recent-activity feed and return the parsed records.
    '''
    # get the club id
    club_id = os.environ['CLUB_ID']

    # open session
    session_requests = requests.session()

    # get a token for this session
    login_url = 'https://www.strava.com/login'
    result = session_requests.get(login_url)
    tree = html.fromstring(result.text)
    authenticity_token = list(
        set(
            tree.xpath(
                "//*[@id='login_form']/input[@name='authenticity_token']/@value"
            )))[0]

    # login
    login_result = session_requests.post(
        'https://www.strava.com/session',
        data=dict(email=os.environ['EMAIL'],
                  password=os.environ['PASSWORD'],
                  authenticity_token=authenticity_token),
        headers=dict(referer=login_url))

    # resulting activity records from strava
    records = set()

    # get the recent activity
    club_recent_activity = session_requests.get(
        f'https://www.strava.com/clubs/{club_id}/recent_activity')
    last_timestamp, temp_records = parse_activity_html(club_recent_activity)
    records.update(temp_records)

    # keep paging until the feed returns no more records
    while len(temp_records) > 0:
        club_recent_activity_continued = session_requests.get(
            f"https://www.strava.com/clubs/{club_id}/feed",
            params=dict(feed_type='club',
                        before=last_timestamp,
                        cursor=last_timestamp + ".0"))
        last_timestamp, temp_records = parse_activity_html(
            club_recent_activity_continued)
        records.update(temp_records)

    # close the session
    session_requests.close()

    return {'content': list(records)}
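
The example calls parse_activity_html without defining it. A minimal sketch of what such a helper might look like, assuming the feed markup exposes one container per activity carrying an id and a data-updated-at attribute (both assumptions, not confirmed by the source):

from lxml import html


def parse_activity_html(response):
    # hypothetical helper: extract activity ids and the oldest timestamp
    # from a feed response; the selectors below are assumptions
    tree = html.fromstring(response.text)
    records = set()
    last_timestamp = None
    for entry in tree.xpath("//div[contains(@class, 'activity')]"):
        records.add(entry.get('id'))
        timestamp = entry.get('data-updated-at')  # assumed attribute
        if timestamp:
            last_timestamp = timestamp
    return last_timestamp, records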
Example No. 2

import json
import os

import boto3
import requests


def handler(event, context):
    instance_id = os.environ["instanceId"]
    lambda_client = boto3.client('lambda')
    signed_res = lambda_client.invoke(
        FunctionName="getCaasSignature",
        InvocationType='RequestResponse',
        Payload=('"https://hws.hicloud.hinet.net/cloud_hws/api/hws/?action=stopInstances'
                 '&instanceId={0}&version=2015-05-26"').format(instance_id))
    signed_url = json.loads(signed_res['Payload'].read())
    session = requests.session()

    try:
        res = session.get(signed_url)
        res_json = res.json()
    except requests.exceptions.RequestException:
        return {
            'statusCode': 500,
            'body': json.dumps({
                'message': "A network error occurred while stopping the server."
            })
        }

    if "errors" in res_json:
        return {
            'statusCode': 500,
            'body': json.dumps({
                'message': "An exception occurred while stopping the server."
            })
        }

    if "statusMap" in res_json and instance_id in res_json["statusMap"]:
        if res_json["statusMap"][instance_id] == "vm_stopping":
            return {
                'statusCode': 200,
                'body': json.dumps({'message': "OK, Server is shutting down."})
            }
        else:
            return {
                'statusCode':
                500,
                'body':
                json.dumps(
                    {'message': res_json["statusMap"][instance_id] + "error."})
            }
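
For reference, a payload of roughly the following shape (inferred from the checks above, not from hiCloud documentation) takes the success path, while an "errors" key, or a missing statusMap entry for the instance, yields a 500 response:

{"statusMap": {"your-instance-id": "vm_stopping"}}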
Example No. 3
import json
from time import sleep

import requests

# project-local helpers assumed importable alongside this handler:
# database, error_handler, scan_target, save_target_record,
# queue_recursive_scans, queue_content_screenshot


def lambda_handler(event, context):
    message = json.loads(event['Records'][0]["Sns"]["Message"])
    project_id = message['project_id']
    target_record = message['target_record']
    scan_id = target_record['scan_id']
    target_id = target_record['id']
    paths = message['scan_list']
    task_number = message['task_number']
    protocol = message['additional_params']['protocol']
    starting_path = message['additional_params']['starting_path']
    port = message['additional_params']['port']
    target = target_record['host'] if target_record['host'] != "" else target_record['ip']

    try:
        task_queue_id = database.getTaskLock(project_id, target_id,
                                             task_number)
        if not task_queue_id:
            # this task is being processed by another invocation or was already done
            print("Task already processed")
            return
        loop = 0
        while database.at_max_concurrent_connections(
                project_id, target_id, task_number) and loop < 10:
            loop += 1
            sleep(5)
        if loop == 10:
            return

        session = requests.session()
        base_url = "{0}://{1}:{2}{3}".format(protocol, target, port,
                                             starting_path)
        results = scan_target(session, base_url, paths)
        results = save_target_record(scan_id, target_id, results)
        queue_recursive_scans(project_id, target_record, protocol, port,
                              results)
        queue_content_screenshot(project_id, target_record, results)
        database.unlock_task_record(task_queue_id)

    except Exception as e:
        error_handler.handleError(e)
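
scan_target is defined elsewhere in this project. A minimal sketch of the kind of path prober the handler appears to expect, with the result shape and all names being assumptions:

def scan_target(session, base_url, paths):
    # hypothetical sketch: probe each candidate path and keep those that answer
    results = []
    for path in paths:
        try:
            response = session.get(base_url + path, timeout=10)
        except requests.exceptions.RequestException:
            continue
        if response.status_code != 404:
            results.append({'path': path,
                            'status': response.status_code,
                            'length': len(response.content)})
    return results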
Example No. 4
import json

import boto3
import requests

# get_secret, region_name, intezer_api_key_secret_name,
# intezer_analysis_url_2_sha256_table and intezer_api_base_url are assumed
# to be defined elsewhere in this module

try:
    intezer_api_key = get_secret(secret_name=intezer_api_key_secret_name,
                                 region_name=region_name)
except Exception as e:
    print(e)

try:
    dynamodb = boto3.resource('dynamodb')
    table = dynamodb.Table(intezer_analysis_url_2_sha256_table)
except Exception as e:
    print(e)

response = requests.post(intezer_api_base_url + '/get-access-token',
                         json={'api_key': intezer_api_key})
response.raise_for_status()
session = requests.session()
session.headers['Authorization'] = 'Bearer %s' % response.json()['result']


def return_code(status, body):
    return {"statusCode": status, "body": json.dumps(body)}


def get_analysis_url_from_dynamodb(sha256):
    try:
        sha256 = sha256.lower()
        result = table.get_item(Key={"sha256": sha256})
        if "Item" in result.keys():
            return {"code": "SUCCESS", "content": result['Item']}
        else:
            # assumed completion: the source snippet is truncated at this point
            return {"code": "NOT_FOUND", "content": None}
    except Exception as e:
        return {"code": "ERROR", "content": str(e)}
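
A brief usage sketch of the helpers above (the hash value is a placeholder):

# hypothetical usage; the sha256 value is illustrative only
cached = get_analysis_url_from_dynamodb('a' * 64)
if cached['code'] == 'SUCCESS':
    response = return_code(200, cached['content'])
else:
    response = return_code(404, {'message': 'analysis not found'})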
Example No. 5
import copy
import datetime
import json
import time

import requests

# lambda_simulator and seconds_remaining_before_re_execution are assumed to
# be defined elsewhere in this module


def execute_search(query,
                   creds,
                   req,
                   context,
                   query_name,
                   timerange=None,
                   jobid=None,
                   session=None,
                   st=None,
                   logger=None):
    """ Execute a sumo query using the provided creds
        Args:
        :query - query string
        :creds - a dict with a key called "access_key" of the format: <sumo_id>:<sumo_key>
        :timerange (optional) - a dict with 2 keys: 'to' and 'from' whose values are datetime values
        :return: results
    """

    # simulated Lambda client, used to re-invoke this function when the
    # current invocation runs out of time
    lambda_client = lambda_simulator.LambdaClient(execute_search,
                                                  'execute_search')

    if not creds:
        raise Exception("invalid creds {} provided.".format(creds))

    headers = {
        'Content-Type': 'application/json',
        'Authorization': 'Basic ' + creds['access_key'],
        'Accept': 'application/json'
    }

    timenow = datetime.datetime.combine(datetime.date.today(),
                                        datetime.time.min)
    if (timerange is None):
        # ensure same time on any system running
        end_time = datetime.datetime.combine(
            datetime.date.today(),
            datetime.time.min) - datetime.timedelta(days=1)
        start_time = end_time - datetime.timedelta(hours=6)
    else:
        end_time = timerange['to']
        start_time = timerange['from']

    params = {
        'query': query,
        'from': start_time.strftime("%Y-%m-%dT%H:%M:%S"),
        'to': end_time.strftime("%Y-%m-%dT%H:%M:%S"),
        'timeZone': 'PST'
    }

    sumo_session = requests.session()
    # counts are filled in while polling; initialize in case the loop never runs
    record_count = None
    message_count = None

    # not every region is listed here; us2, eu and au cover the common cases
    if creds['region'] in ['us2', 'eu', 'au']:
        api_endpoint = 'https://api.{}.sumologic.com/api/v1/search/jobs/'.format(creds["region"])
    else:
        api_endpoint = 'https://api.sumologic.com/api/v1/search/jobs/'
    if not jobid:
        r = sumo_session.post(api_endpoint, headers=headers, json=params)
        job_id = r.json()['id']

        r = sumo_session.get(api_endpoint + job_id, headers=headers)
        raw_state = r.json().get('state', None)
        if (raw_state is not None):
            request_state = raw_state.upper()
        else:
            if (logger is not None):
                logger.warn("State field doesn't exist. Response: {}".format(
                    str(r)))
            else:
                print("ERROR: State field doesn't exist. Response: {}".format(
                    str(r)))
            request_state = None

    else:
        # a jobid was handed over from a previous invocation; resume polling it
        job_id = jobid
        request_state = 'GATHERING RESULTS'

    while ((request_state is not None) and (request_state not in [
            'PAUSED', 'FORCE PAUSED', 'DONE GATHERING RESULTS', 'CANCELED'
    ])):
        time.sleep(10)
        out_cookies = session if session else dict(sumo_session.cookies)
        # hand the job off to a fresh invocation before this one hits its time limit
        if context.get_remaining_time_in_millis(
        ) / 1000 < seconds_remaining_before_re_execution:
            # replicate req:
            new_req = copy.copy(req)
            new_req['jobid'] = job_id
            new_req['session'] = out_cookies
            resp = lambda_client.invoke(
                FunctionName=context.invoked_function_arn,
                InvocationType='Event',
                Payload=json.dumps(new_req))
            print('Query: {}, context passed to {}, timeout.'.format(
                req['name'], resp))
            return None

        r = sumo_session.get('{}{}'.format(api_endpoint, job_id),
                             headers=headers,
                             cookies=out_cookies)
        if r.status_code != 200:
            print(
                f'error caught - retrying once.code: {r.status_code}, error: {r.content}'
            )
            time.sleep(10)
            r = sumo_session.get('{}{}'.format(api_endpoint, job_id),
                                 headers=headers,
                                 cookies=out_cookies)
            print(
                f'RETRY>>>> endpoint: {api_endpoint}, job_id: {job_id}, cooks: {out_cookies}'
            )
            print(f'RETRY>>>> status: {r.status_code}, resp: {r.content}')

        message_count = r.json().get('messageCount', None)
        if (message_count is None):
            if (logger is not None):
                logger.warning(
                    "MessageCount field doesn't exist. Response: {}".format(
                        str(r)))
            else:
                print("ERROR: MessageCount field doesn't exist. Response: {}".
                      format(str(r)))
            request_state = r.json().get('state', None)
            if (request_state is not None):
                request_state = request_state.upper()
            else:
                if (logger is not None):
                    logger.error("Request State is none!")
                else:
                    print("ERROR: Request State is none!")
                break
        else:
            record_count = r.json().get('recordCount', None)
            request_state = r.json()['state'].upper()

    if ((request_state is not None) and ('DONE' in request_state)):
        # Check if we have enough time left to send data
        out_cookies = session if session else dict(sumo_session.cookies)
        count = record_count or message_count or 0
        if (count > 100000):
            time_limit = 3 * seconds_remaining_before_re_execution
        else:
            time_limit = seconds_remaining_before_re_execution
        if context.get_remaining_time_in_millis() / 1000 < time_limit:
            # replicate req:
            new_req = copy.copy(req)
            new_req['jobid'] = job_id
            new_req['session'] = out_cookies
            resp = lambda_client.invoke(
                FunctionName=context.invoked_function_arn,
                InvocationType='Event',
                Payload=json.dumps(new_req))
            print(
                'Query: {} finished but need more time to stash, context passed to {}, timeout.'
                .format(req['name'], resp))
            return None
        out = []
        co = 0
        while co < count:
            r = sumo_session.get('{}{}/records?offset={}&limit=10000'.format(
                api_endpoint, job_id, co),
                                 headers=headers,
                                 cookies=out_cookies)

            co += 10000  # increase offset by paging limit

            if r.json().get('records', None):
                for row in r.json()['records']:
                    new_row = row['map']
                    # execution time
                    new_row['sys_date'] = (st.strftime("%Y-%m-%dT%H:%M:%S")
                                           if st is not None else
                                           timenow.strftime("%Y-%m-%dT%H:%M:%S"))
                    # actual query time
                    new_row['query_start'] = start_time.strftime(
                        "%Y-%m-%dT%H:%M:%S")
                    new_row['query_end'] = end_time.strftime(
                        "%Y-%m-%dT%H:%M:%S")
                    out.append(new_row)
            else:
                for row in r.json()['messages']:
                    new_row = row['map']
                    # execution time
                    new_row['sys_date'] = (st.strftime("%Y-%m-%dT%H:%M:%S")
                                           if st is not None else
                                           timenow.strftime("%Y-%m-%dT%H:%M:%S"))
                    # actual query time
                    new_row['query_start'] = start_time.strftime(
                        "%Y-%m-%dT%H:%M:%S")
                    new_row['query_end'] = end_time.strftime(
                        "%Y-%m-%dT%H:%M:%S")
                    out.append(new_row)
        return out

    elif request_state in ['PAUSED', 'FORCE PAUSED', 'CANCELED']:
        print("Request {}.".format(request_state))
        return None
    else:
        if (request_state is None):
            # the Sumo API returned no state; re-run the query from scratch
            retry = req.get('retry', 0)
            if (retry < 3):
                new_req = copy.copy(req)
                if ('jobid' in new_req):
                    new_req.pop('jobid')
                if ('session' in new_req):
                    new_req.pop('session')
                new_req['retry'] = retry + 1
                resp = lambda_client.invoke(
                    FunctionName=context.invoked_function_arn,
                    InvocationType='Event',
                    Payload=json.dumps(new_req))
                print(
                    'Retry query: {}, timerange: {} to {},  context passed to {}, timeout.'
                    .format(query, start_time.strftime("%Y-%m-%dT%H:%M:%S"),
                            end_time.strftime("%Y-%m-%dT%H:%M:%S"), resp))
                return None
            else:
                print(
                    "Giving up on query {} (timerange: {} to {}) after {} retries"
                    .format(query, start_time.strftime("%Y-%m-%dT%H:%M:%S"),
                            end_time.strftime("%Y-%m-%dT%H:%M:%S"), retry))
                return None
        print("Error, request state was: {}.".format(request_state))
        return None
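
A hypothetical invocation sketch, following the docstring's contract for creds and timerange (the access key, region, query string and req dict are placeholders, and context is assumed to be the Lambda context object):

creds = {'access_key': 'c3Vtb19pZDpzdW1vX2tleQ==', 'region': 'us2'}  # placeholder
timerange = {'from': datetime.datetime(2021, 1, 1, 0, 0),
             'to': datetime.datetime(2021, 1, 1, 6, 0)}
req = {'name': 'error_counts'}
rows = execute_search('_sourceCategory=prod | count by _sourceHost',
                      creds, req, context, 'error_counts',
                      timerange=timerange)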
Example No. 6

import csv
import datetime
import json
from datetime import date, timedelta

import boto3
import pandas as pd
import requests

# app_id and app_code (HERE API credentials) are assumed to be defined
# elsewhere in this module


def lambda_handler(event, context):

    incidentId = 'CentenaryHighway'
    incidentCord = '-27.555910,152.940246'
    # set up the output dataframe
    dfcols = ['id', 'linkId', 'name', 'avSpeed', 'jamF', 'direc', 'cords']
    dfHere = pd.DataFrame(columns=dfcols)

    # configure the session for the HERE flow API request
    urlsession = requests.session()
    prox = "0.01"  # proximity in metres

    # configure the request URL
    url = ("https://traffic.api.here.com/traffic/6.2/flow.json?app_id=" + app_id +
           "&app_code=" + app_code)
    url += "&prox=" + incidentCord + "," + prox + "&responseattributes=sh,fc&units=metric"
    # send the request through the session, then clean up
    response = urlsession.get(url, timeout=600)
    response = response.content
    urlsession.close()
     
    # only process a non-empty response
    if response:
    
        # parse the JSON return into dataframe rows
        try:
            r = json.loads(response)
            for el1 in r['RWS']:
                for el2 in el1['RW']:
                    for el3 in el2['FIS']:  # road level
                        for el4 in el3['FI']:  # flow information at link level
                            # strip apostrophes, e.g. "O'keefe Street" -> "Okeefe Street"
                            linRd = el4['TMC'].get('DE').replace("'", "")
                            linRd_pk = el4['TMC'].get('PC')
                            flowInfoDirection = el4['TMC'].get('QD')
                            # speed (in the requested units), not capped by the speed limit
                            flowInfoSpeed = el4['CF'][0].get('SU')
                            # jam factor 0.0-10.0 (10.0 = road closed, -1.0 = not calculable)
                            flowInfoJam = el4['CF'][0].get('JF')
                            # confidence: ~0.7-1.0 (1.0 = 100% real-time), -1.0 = road closed
                            flowInfoCon = el4['CF'][0].get('CN')
                            for el5 in el4['SHP']:  # shape points
                                cordStr = el5['value']
                                dfHere.loc[len(dfHere)] = [incidentId, linRd_pk, linRd,
                                                           flowInfoSpeed, flowInfoJam,
                                                           flowInfoDirection, cordStr]
        except Exception as ex:
            print(str(response))
            raise ex

    # keep only a single link of interest from the response
    link_of_interest = dfHere.loc[(dfHere['linkId'] == 31862)
                                  & (dfHere['name'] == 'Sumners Road')
                                  & (dfHere['direc'] == '+')]

    # extract speed and jam factor
    link_of_interest_avspeed = round(link_of_interest['avSpeed'].mean(), 1)
    link_of_interest_jamfactor = round(link_of_interest['jamF'].mean(), 1)

    # timestamp in local time (UTC+10)
    link_time = date.strftime(datetime.datetime.utcnow() + datetime.timedelta(hours=10),
                              '%Y-%m-%d %H:%M')

    # #extract coordinates if required...
    # here_locations = [] 
    # # iterate over rows with iterrows()
    # for index, row in link_of_interest.head().iterrows():
    #      # access data using column names
    #     cord_string='['+str(row['cords'])+']'
    #     cord_string = cord_string.replace(' -','],[-').replace("'","")
    #     cord_string =eval(cord_string)
    #     here_locations.append(cord_string)

    # read the existing csv file from S3
    s3_client = boto3.client('s3')
    outbucketname = 'public-test-road'
    key = r'hypothesis2/livelinks_here_31862.csv'
    org_incCsv = pd.read_csv(s3_client.get_object(Bucket=outbucketname, Key=key)['Body'])

    # append the new row
    org_incCsv.loc[len(org_incCsv)] = [str(31862), str(link_of_interest_avspeed),
                                       str(link_of_interest_jamfactor), str(link_time)]

    # write back out to S3 via Lambda's /tmp scratch space
    tmpfp = r'/tmp/combine_csv.csv'
    with open(tmpfp, 'w') as h:
        h.write('id,speed,jamf,time' + '\n')
        for index, row in org_incCsv.iterrows():
            h.write(str(row['id']) + ',' + str(row['speed']) + ',' +
                    str(row['jamf']) + ',' + str(row['time']) + '\n')
    s3_client.upload_file(tmpfp, Bucket=outbucketname, Key=key)


    ### This section calls the Streams (Transmax) loop detectors

    # set up the request
    url = 'https://api.dtmr.staging.data.streams.com.au'
    ser_SIMSRecent = url + "/traffic/v1/link/csv"
    headers_txt = {'Content-type': 'application/csv',
                   'x-api-key': 'USoLc2B9De86v9QHy5ahcaXkXJd8Fq0a7MCwTI2V'}

    # sensor 8484273
    payload_ser_aggDet = {'ids': 8484273}
    
    response = requests.get(ser_SIMSRecent, params=payload_ser_aggDet,
                            headers=headers_txt, timeout=600)
    decoded_content = response.content.decode('utf-8')
    cr = csv.reader(decoded_content.splitlines(), delimiter=',')

    my_list = list(cr)
    # keep the speed only if the record is valid
    if int(my_list[1][0]) > 0:
        id_8484273_speed = int(my_list[4][4])
        id_8484273_time = date.strftime(
            datetime.datetime.strptime(my_list[4][1][:-1], '%Y-%m-%dT%H:%M:%S.%f')
            + timedelta(hours=10), '%Y-%m-%d %H:%M')
    else:
        # assumed fallback so later references don't fail when no valid record arrives
        id_8484273_speed = 0
        id_8484273_time = link_time

    # sensor 8484287
    payload_ser_aggDet = {'ids': 8484287}
    response = requests.get(ser_SIMSRecent, params=payload_ser_aggDet,
                            headers=headers_txt, timeout=600)
    decoded_content = response.content.decode('utf-8')
    cr = csv.reader(decoded_content.splitlines(), delimiter=',')
    my_list = list(cr)
    # keep the speed only if the record is valid
    if int(my_list[1][0]) > 0:
        id_8484287_speed = int(my_list[4][4])
        id_8484287_time = date.strftime(
            datetime.datetime.strptime(my_list[4][1][:-1], '%Y-%m-%dT%H:%M:%S.%f')
            + timedelta(hours=10), '%Y-%m-%d %H:%M')
    else:
        # assumed fallback, mirroring the first sensor
        id_8484287_speed = 0
        id_8484287_time = link_time

    # average the values from the two sensors
    if id_8484273_speed > 0 and id_8484287_speed > 0:
        average_speed = (id_8484273_speed + id_8484287_speed) / 2
    else:
        average_speed = 0

    # read, append and send out to S3 - first loop detector
    key2 = r'hypothesis2/livelinks_streams_8484273.csv'
    tmpfp = r'/tmp/loops_csv.csv'
    org_incCsv = pd.read_csv(s3_client.get_object(Bucket=outbucketname, Key=key2)['Body'])
    org_incCsv.loc[len(org_incCsv)] = [str(8484273), str(id_8484273_speed),
                                       str(id_8484273_time)]
    with open(tmpfp, 'w') as h:
        h.write('id,speed,time' + '\n')
        for index, row in org_incCsv.iterrows():
            h.write(str(row['id']) + ',' + str(row['speed']) + ',' +
                    str(row['time']) + '\n')
    s3_client.upload_file(tmpfp, Bucket=outbucketname, Key=key2)
 
 
    # read, append and send out to S3 - second loop detector
    key3 = r'hypothesis2/livelinks_streams_8484287.csv'
    org_incCsv = pd.read_csv(s3_client.get_object(Bucket=outbucketname, Key=key3)['Body'])
    org_incCsv.loc[len(org_incCsv)] = [str(8484287), str(id_8484287_speed),
                                       str(id_8484287_time)]
    with open(tmpfp, 'w') as h:
        h.write('id,speed,time' + '\n')
        for index, row in org_incCsv.iterrows():
            h.write(str(row['id']) + ',' + str(row['speed']) + ',' +
                    str(row['time']) + '\n')
    s3_client.upload_file(tmpfp, Bucket=outbucketname, Key=key3)
                
                
    # read, append and send out to S3 - sensor average
    key4 = r'hypothesis2/livelinks_streams_average.csv'
    org_incCsv = pd.read_csv(s3_client.get_object(Bucket=outbucketname, Key=key4)['Body'])
    org_incCsv.loc[len(org_incCsv)] = ['average', str(average_speed), str(id_8484287_time)]
    with open(tmpfp, 'w') as h:
        h.write('id,speed,time' + '\n')
        for index, row in org_incCsv.iterrows():
            h.write(str(row['id']) + ',' + str(row['speed']) + ',' +
                    str(row['time']) + '\n')
    s3_client.upload_file(tmpfp, Bucket=outbucketname, Key=key4)

    return
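
The read/append/rewrite cycle above is repeated for each CSV. A hedged refactoring sketch of that pattern (the helper name is invented):

def append_row_to_s3_csv(s3_client, bucket, key, header, row,
                         tmpfp='/tmp/append_csv.csv'):
    # hypothetical helper: pull a CSV from S3, append one row, push it back
    df = pd.read_csv(s3_client.get_object(Bucket=bucket, Key=key)['Body'])
    df.loc[len(df)] = [str(v) for v in row]
    with open(tmpfp, 'w') as h:
        h.write(header + '\n')
        for _, r in df.iterrows():
            h.write(','.join(str(r[c]) for c in df.columns) + '\n')
    s3_client.upload_file(tmpfp, Bucket=bucket, Key=key)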
Example No. 7
import requests

# BLYNKIP, BLYNKAUTH, USERNAME, PASSWORD, LoginParser, lastwatchedparser,
# build_response and build_speechlet_response are assumed to be defined
# elsewhere in this module


def playintent(intent, session):
    card_title = intent['name']
    session_attributes = {}
    should_end_session = False
    serviceslot = intent['slots']['service']['resolutions'][
        'resolutionsPerAuthority'][0]['values'][0]['value']['name']
    locationslot = intent['slots']['location']['resolutions'][
        'resolutionsPerAuthority'][0]['values'][0]['value']['name']
    if (serviceslot == None or serviceslot == ""):
        serviceslot = intent['slots']['service']['value']
    if (locationslot == None or locationslot == ""):
        locationslot = intent['slots']['location']['value']
    target = ""
    if (serviceslot == 'netflix'):

        LOGIN_URL = "https://www.netflix.com/login"
        URL = "https://www.netflix.com/WiViewingActivity"

        session_requests = requests.session()
        target = '1'

        # Get login csrf token
        result = session_requests.get(LOGIN_URL)
        parser = LoginParser()
        parser.feed(result.text)

        # Create payload (assuming LoginParser stores the scraped token
        # on an authenticity_token attribute)
        payload = {
            "action": "loginAction",
            "authURL": parser.authenticity_token,
            "email": USERNAME,
            "flow": "websiteSignUp",
            "mode": "login",
            "nextPage": "",
            "password": PASSWORD,
            "rememberMe": "true",
            "showPassword": "",
            "withFields": "email,password,rememberMe,nextPage,showPassword"
        }

        # Perform login
        result = session_requests.post(LOGIN_URL,
                                       data=payload,
                                       headers=dict(referer=URL))

        # Scrape url
        result = session_requests.get(URL, headers=dict(referer=LOGIN_URL))

        parser = lastwatchedparser()
        parser.feed(result.text)

        # Update Blynk (assuming lastwatchedparser stores the scraped id
        # on a lastwatchid attribute)
        lastwatchedurl = ("http://" + BLYNKIP + "/" + BLYNKAUTH +
                          "/update/V4?value=" + parser.lastwatchid[7:15])

        #Method 1 of sending the request
        #lastreq = urlreq.Request(lastwatchedurl)
        #urlreq.urlopen(lastreq).read()

        #Method 2 of sending the request, I found this to be more reliable
        update_nfx = requests.session()
        nfxresult = update_nfx.get(lastwatchedurl)
    elif (serviceslot == 'spotify'):
        target = '2'
    elif (serviceslot == 'VLC'):
        target = '3'

    if (locationslot == 'living room'):
        target += '4'
    elif (locationslot == 'bedroom'):
        target += '5'
    elif (locationslot == 'PC'):
        target += '6'

    finalurl = "http://" + BLYNKIP + "/" + BLYNKAUTH + "/update/V3?value=" + target

    #Method 1 of sending the request
    #req = urlreq.Request(finalurl)
    #urlreq.urlopen(req).read()

    #Method 2 of sending the request
    updatefinalurl = requests.session()
    finalresults = updatefinalurl.get(finalurl)

    session_attributes = target
    speech_output = "Playing"
    reprompt_text = None
    should_end_session = True
    return build_response(
        session_attributes,
        build_speechlet_response(card_title, speech_output, reprompt_text,
                                 should_end_session))
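
LoginParser and lastwatchedparser are not shown in this example. A minimal sketch of an HTMLParser subclass that could fill the LoginParser role (the attribute name and the hidden-input field it looks for are assumptions):

from html.parser import HTMLParser


class LoginParser(HTMLParser):
    # hypothetical parser: grab the hidden authURL field from the login page
    def __init__(self):
        super().__init__()
        self.authenticity_token = None

    def handle_starttag(self, tag, attrs):
        attrs = dict(attrs)
        if tag == 'input' and attrs.get('name') == 'authURL':  # assumed field name
            self.authenticity_token = attrs.get('value')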
Example No. 8

import json
import os

import boto3
import requests


def handler(event, context):
    instance_id = os.environ['instanceId']
    lambda_client = boto3.client('lambda')

    status_signed_res = lambda_client.invoke(
        FunctionName="getCaasSignature",
        InvocationType='RequestResponse',
        Payload=("\"https://hws.hicloud.hinet.net/cloud_hws/api/hws/?"
                 "action=describeInstances&instanceId={0}"
                 "&version=2015-05-26\"").format(instance_id))

    status_signed_url = json.loads(status_signed_res['Payload'].read())
    session = requests.session()
    try:
        status_res = session.get(status_signed_url)
        status_res_json = status_res.json()
    except requests.exceptions.RequestException:
        return {
            'statusCode': 500,
            'body': json.dumps({
                'message': "A network error occurred while getting the Server state."
            })
        }

    if "instanceList" in status_res_json and len(status_res_json["instanceList"]) == 1 and "operationStatus" in \
            status_res_json["instanceList"][0]:
        status = status_res_json["instanceList"][0]["operationStatus"]

        if status == "vm_stop":
            start_signed_res = lambda_client.invoke(
                FunctionName="getCaasSignature",
                InvocationType='RequestResponse',
                Payload=("\"https://hws.hicloud.hinet.net/cloud_hws/api/hws/?"
                         "action=startInstances&instanceId={0}"
                         "&version=2015-05-26\"").format(instance_id))
            start_signed_url = json.loads(start_signed_res['Payload'].read())

            try:
                start_res = session.get(start_signed_url)
                start_res_json = start_res.json()
            except requests.exceptions.RequestException:
                return {
                    'statusCode': 500,
                    'body': json.dumps({
                        'message': "A network error occurred while starting the server."
                    })
                }

            if "errors" in start_res_json:
                return {
                    'statusCode':
                    500,
                    'body':
                    json.dumps({
                        'message':
                        ("An exception occurred while starting the server. "
                         "Please try again later.")
                    })
                }
            else:
                status = "OK, Server is booting up."
        elif status == "vm_start":
            status = (
                "Server is powered on, you can try to connect directly. "
                "If you are unable to connect, maybe our server is starting the VPN service, "
                "or is preparing to shutdown. Please try again after about 1 minute."
            )
        elif status == "vm_stopping":
            status = "Server is shutting down, please try again later."
        elif status == "vm_starting":
            status = "Server is booting up, please try again later."
        else:
            status = "Unknown error."

        return {'statusCode': 200, 'body': json.dumps({'message': status})}

    # fall through: the describe call did not return a usable instance record
    return {
        'statusCode': 500,
        'body': json.dumps({'message': "Unable to determine the server state."})
    }
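
Note the overall flow here: the handler signs and calls describeInstances first, and only signs and calls startInstances when the instance reports vm_stop; every other operationStatus is translated into a human-readable message rather than triggering a second state-changing call.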
Example No. 9
import os

import requests


def slack_session():
    session = requests.session()
    token = os.environ['SLACK_TOKEN']
    session.headers['Authorization'] = f"Bearer {token}"
    session.headers['Content-Type'] = 'application/json; charset=utf-8'
    return session
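
A brief usage sketch (the channel id is a placeholder; chat.postMessage is Slack's standard message-posting method):

session = slack_session()
resp = session.post('https://slack.com/api/chat.postMessage',
                    json={'channel': 'C0123456789', 'text': 'Hello from Lambda'})
print(resp.json().get('ok'))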