def sentimentanalyze(m):
    error = False
    body = m.get_body()
    tweet = ast.literal_eval(body)
    response = alchemy.sentiment("text", tweet['text'])
    print(response)
    if response['status'] == 'ERROR':
        print('ERROR')
        error = True
    if not error:
        tweet['sentiment'] = response["docSentiment"]["type"]
        json_string = json.dumps(tweet)
        sns.publish(sas.arn['arn'], json_string, subject='Newtopic')
        print('--------------------------------------')
        try:
            elasticsearch.index(index="geotweets", doc_type="tweet", body=tweet)
        except Exception:
            print("Could not index this tweet")
    # Delete the notification when done
    print('Done')
def application(environ, start_response):
    path = environ['PATH_INFO']
    method = environ['REQUEST_METHOD']
    if method == 'POST':
        try:
            if path == '/':
                request_body_size = int(environ['CONTENT_LENGTH'])
                request_body = environ['wsgi.input'].read(request_body_size).decode()
                domain = base64.b64decode(request_body)
                domain = json.loads(domain)
                try:
                    message = domain['content']
                    mid = domain['id']
                    senti = senti_api.sentiment(message)
                    logger.info("Received message: Sentiment: %s" % senti)
                    sns.publish(topicarn, json.dumps({'id': mid, 'senti': senti}))
                except Exception:
                    logger.warning('Error receiving data')
        except (TypeError, ValueError):
            logger.warning('Error retrieving request body for async work.')
        response = ''
    else:
        response = ''
    status = '200 OK'
    headers = [('Content-type', 'text/html')]
    start_response(status, headers)
    return [response.encode('utf-8')]  # WSGI response bodies must be bytes
def handle(self, *args, **options):  # pylint: disable=too-many-locals, too-many-branches, too-many-statements
    tokens = {}

    for point in DataPoint.objects.filter(generator_identifier='pdk-app-event',
                                          secondary_identifier='pdk-ios-device-token').order_by('created'):
        properties = point.fetch_properties()
        tokens[point.source] = properties['event_details']['token']

    region = [r for r in boto.sns.regions() if r.name == settings.PDK_BOTO_REGION][0]

    notification = {'aps': {'content-available': 1}}
    message = {'APNS': json.dumps(notification), 'default': 'nil'}

    sns = boto.sns.SNSConnection(
        aws_access_key_id=settings.PDK_BOTO_ACCESS_KEY,
        aws_secret_access_key=settings.PDK_BOTO_ACCESS_SECRET,
        region=region,
    )

    for source, token in tokens.iteritems():  # pylint: disable=unused-variable
        try:
            endpoint_response = sns.create_platform_endpoint(
                platform_application_arn=settings.PDK_BOTO_SNS_ARN,
                token=token,
            )
            endpoint_arn = endpoint_response['CreatePlatformEndpointResponse']['CreatePlatformEndpointResult']['EndpointArn']
        except boto.exception.BotoServerError, err:
            print 'ERR 1: ' + err.message

            # Yes, this is actually the official way:
            # http://stackoverflow.com/questions/22227262/aws-boto-sns-get-endpoint-arn-by-device-token
            result_re = re.compile(r'Endpoint(.*)already', re.IGNORECASE)
            result = result_re.search(err.message)

            if result:
                endpoint_arn = result.group(0).replace('Endpoint ', '').replace(' already', '')
            else:
                raise

        try:
            sns.publish(target_arn=endpoint_arn, message_structure='json', message=json.dumps(message))
        except boto.exception.BotoServerError, err:
            print 'FAILED SENDING TO ' + token
            print 'ERR: ' + err.message

            result_re = re.compile(r'Endpoint(.*)disabled', re.IGNORECASE)
            result = result_re.search(err.message)

            if result:
                for point in DataPoint.objects.filter(source=source,
                                                      generator_identifier='pdk-app-event',
                                                      secondary_identifier='pdk-ios-device-token').order_by('created'):
                    properties = point.fetch_properties()

                    if token == properties['event_details']['token']:
                        print 'RENAMING: ' + token
                        point.secondary_identifier = 'pdk-ios-device-token-sandbox'
                        point.save()
            else:
                raise
def send_message(message):
    region = [r for r in boto.sns.regions() if r.name == 'us-east-1'][0]
    access_key, access_secret_key = aws_keys()
    sns = boto.sns.SNSConnection(aws_access_key_id=access_key,
                                 aws_secret_access_key=access_secret_key,
                                 region=region)
    sns.publish(topic="arn:aws:sns:us-east-1:001928331621:notifications", message=message)
    print 'message sent!'
def run(self):
    if self.bucket:
        connection = boto.connect_s3()
        bucket = Bucket(connection, self.bucket)
        key = Key(bucket)
        key.key = datetime.datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S') + '.jpg'
        key.set_contents_from_string(self.image, headers={'Content-Type': 'image/jpeg'})
        print 'uploaded:', key.key
        if self.region and self.topic:
            url = key.generate_url(31536000)  # pre-signed URL, valid for one year
            sns = boto.sns.connect_to_region(self.region)
            sns.publish(self.topic, url)
            print 'published:', self.topic, url
def gen_and_diff(logdir="."):
    # Get previous report
    lfiles = get_last_tstamp_file(logdir)
    if len(lfiles) > 0:
        f = lfiles[0]
    else:
        print "No previous log files to compare."
        report = gen_report()
        if args.outfile:
            print "Writing first log file."
            save_report(report, args.outfile)
        else:
            print report
        sys.exit(0)
    print "Comparing to " + str(f)
    # Load previous report
    preport = open(f).readlines()
    # Generate current report
    creport = gen_report()
    diff = difflib.unified_diff(preport, creport, n=1)
    ldiff = list(diff)
    if len(ldiff) > 0:
        print "Found diffs. Sending notification."
        sns = boto.sns.connect_to_region(
            'us-west-2',
            aws_access_key_id=assume_role.credentials.access_key,
            aws_secret_access_key=assume_role.credentials.secret_key,
            security_token=assume_role.credentials.session_token)
        diffstr = ''.join(ldiff)
        if args.sns:
            sns.publish(args.sns, message=diffstr, subject="Security Notice!")
        else:
            print diffstr
        if args.outfile:
            save_report(creport, args.outfile)
        return ldiff
    else:
        return None
def sns_send_push_notification_to_user(endpoint=None, message=None, data=None, user=None, **kwargs):
    if not user:
        return
    boto.set_stream_logger('boto')
    sns = boto.sns.connect_to_region('us-east-1')
    apns_dict = {'aps': {'alert': message, 'sound': 'default'}}
    if data:
        apns_dict['data'] = data
    apns_string = json.dumps(apns_dict, ensure_ascii=False)
    message = {'default': message, settings.AWS_SNS_APNS_PLATFORM: apns_string}
    messageJSON = json.dumps(message, ensure_ascii=False)
    try:
        for device in user.installations.all():
            apns_endpoint = None
            apns = device.push_endpoints.get('apns', None)
            if apns:
                apns_endpoint = apns.get(endpoint, None)
            if apns_endpoint:
                msg_id = sns.publish(topic=None, message=messageJSON,
                                     target_arn=apns_endpoint, message_structure='json')
    except BotoServerError as e:
        logger.error("utils.aws.sns_send_push_notification_to_user. Boto error {} to user {}".format(e.code, user))
    except:
        logger.error("utils.aws.sns_send_push_notification_to_user. Unknown error {} {}".format(sys.exc_info()[0], sys.exc_info()[1]))
def diff_recent_logs():
    import difflib
    from os import listdir
    from os.path import isfile, join
    lfiles = [f for f in listdir(LOGDIR) if isfile(join(LOGDIR, f))]
    sf = sorted(lfiles, key=_reduce_to_date)
    last2 = sf[-2:]
    diff = difflib.unified_diff(open(LOGDIR + last2[1]).readlines(),
                                open(LOGDIR + last2[0]).readlines())
    ldiff = list(diff)
    diffstr = '\n'.join(ldiff)
    import boto.sns
    if len(ldiff) > 0:
        if args.sns:
            sns = boto.sns.connect_to_region('us-west-2')
            sns.publish(args.sns, message=diffstr, subject="Security Notice!")
    else:
        print "It's fine"
def send_ok():
    sns = boto.sns.connect_to_region('ap-northeast-1')
    topics = sns.get_all_topics()["ListTopicsResponse"]["ListTopicsResult"]["Topics"]
    mytopic = topics[0]
    msg = u"Boss:\n Everything is ready!"
    print datetime.now().month, datetime.now().day
    msg += u'\n\n\n\n\n' + history.history(datetime.now().month, datetime.now().day).string
    # Subject: "Successfully completed the classified-protection (MLPS) filing task"
    subj = u"成功完成等保申报任务"
    res = sns.publish(mytopic['TopicArn'], msg, subj)
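# For context on the indexing above: boto 2's get_all_topics() returns the parsed
# service response as nested dictionaries, which is why the code digs through
# 'ListTopicsResponse' and 'ListTopicsResult'. A rough, hedged sketch of the shape;
# the ARN and request id below are illustrative, not real values.
example_response = {
    "ListTopicsResponse": {
        "ListTopicsResult": {
            "Topics": [
                {"TopicArn": "arn:aws:sns:ap-northeast-1:123456789012:example-topic"},
            ],
            "NextToken": None,
        },
        "ResponseMetadata": {"RequestId": "00000000-0000-0000-0000-000000000000"},
    }
}
# topics[0]['TopicArn'] in the snippet above would resolve to the example ARN here.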
def send_push(room, value, room_type, timestamp):
    print 'Sending push notification for %s: %s (at time %s)' % (room, value, timestamp)
    sns = boto.sns.connect_to_region(
        Constants.aws_region,
        aws_access_key_id=Constants.aws_access_key,
        aws_secret_access_key=Constants.aws_secret_key)
    json_string = json.dumps({
        'default': ' '.join([room, value]),
        'GCM': json.dumps({'data': {
            'room': room,
            'value': value,
            'type': room_type,
            'time': timestamp}
        })
    })
    sns.publish(Constants.aws_sns_topic, json_string, message_structure='json')
def lambda_handler(event, context):
    global email_message, snap_create_message, snap_delete_message, \
        errmsg, total_creates, total_deletes, count_errors

    email_message = ""
    snap_create_message = ""
    snap_delete_message = ""
    errmsg = ""

    # Counters
    total_creates = 0
    total_deletes = 0
    count_errors = 0

    setup_logging()
    make_connections()
    volume_handler(find_volumes())

    # Compose email message
    email_message += snap_create_message + '\n' + snap_delete_message
    email_message += "\nTotal snapshots created: " + str(total_creates)
    email_message += "\nTotal snapshot errors: " + str(count_errors)
    email_message += "\nTotal snapshots deleted: " + str(total_deletes) + "\n\n"
    email_message += 'Finished making snapshots at %(date)s.' % {
        'date': datetime.today().strftime('%Y-%m-%d %H:%M:%S')
    }
    print email_message

    # SNS reporting
    if sns_arn:
        if errmsg:
            sns.publish(topic=sns_arn,
                        message='Error in processing volumes:\n' + errmsg,
                        subject=config.sns['subject'] + ' / ERROR with AWS Snapshot')
        sns.publish(topic=sns_arn, message=email_message, subject=config.sns['subject'])
def diff_recent_logs():
    import difflib
    from os import listdir
    from os.path import isfile, join
    lfiles = [f for f in listdir(LOGDIR) if isfile(join(LOGDIR, f))]
    sf = sorted(lfiles, key=_reduce_to_date)
    last2 = sf[-2:]
    diff = difflib.unified_diff(open(LOGDIR + last2[1]).readlines(),
                                open(LOGDIR + last2[0]).readlines())
    ldiff = list(diff)
    diffstr = '\n'.join(ldiff)
    import boto.sns
    if len(ldiff) > 0:
        if args.sns:
            sns = boto.sns.connect_to_region(
                'us-west-2',
                aws_access_key_id=assume_role.credentials.access_key,
                aws_secret_access_key=assume_role.credentials.secret_key,
                security_token=assume_role.credentials.session_token)
            sns.publish(args.sns, message=diffstr, subject="Security Notice!")
    else:
        print "It's fine"
def checkLogs(buckets):
    # Set ignored id list, add your own account ID here
    ignoredId = []
    # Static email address for testing
    # emailAddr = "*****@*****.**"
    # Set the name of our CloudTrail bucket
    ct_name = "cloudtrailadam"
    # Iterate through buckets and find the CloudTrail bucket
    for bucket in buckets:
        if bucket.name == ct_name:
            # Iterate through all keys in the bucket; each key is a log file
            for key in bucket:
                # Check if key is a file and not a folder by checking the end of the path
                if not key.name.endswith('/'):
                    # Construct an empty StringIO object
                    f = StringIO.StringIO()
                    # Using the boto S3 method get_file, retrieve the file from the S3 key
                    # and place the data in the StringIO buffer
                    key.get_file(f)
                    # Set the position of the read/write pointer to the beginning of the file
                    f.seek(0, 0)
                    # Construct a new GzipFile object and pass it the file object contained in f
                    gzfile = gzip.GzipFile(fileobj=f)
                    # Get the data from the file using Python's .read method
                    data = gzfile.read()
                    # Parse the JSON data into a Python object
                    j = json.loads(data)
                    # Set the parent key from the JSON to look for keys within the main key
                    parent = j["Records"]
                    # Iterate through the JSON records
                    for item in parent:
                        # If the accountId of any event does not match a known one
                        if item['userIdentity']['accountId'] not in ignoredId:
                            print "\nAccess from unknown account: ", item['userIdentity']['accountId'], " found!\n"
                            # Print the details of the event
                            print "Event Details: ", item["sourceIPAddress"], item["userIdentity"]["type"], item["userIdentity"]["accountId"], item["eventName"], item["eventTime"]
                            while True:
                                input = raw_input("\nDo you wish to ignore events from this account ID? (Y/N) ").lower()
                                if input in ('yes', 'y', 'ye'):
                                    # Add the event account ID to the list of ignored IDs
                                    ignoredId.append(item['userIdentity']['accountId'])
                                    break
                                elif input in ('no', 'n', '0'):
                                    input = raw_input("Do you wish to be notified of the first event of this ID via email? (Y/N) > ").lower()
                                    if input in ('yes', 'y', 'ye'):
                                        # Set up email address
                                        emailAddr = raw_input("Please enter an email address to receive alarm notifications: ")
                                        # Create SNS connection
                                        sns = boto.sns.connect_to_region("eu-west-1",
                                                                         aws_access_key_id=key_id,
                                                                         aws_secret_access_key=secret_key)
                                        # Pull all of the topic ARNs on the account and store them in topics
                                        topics = sns.get_all_topics()
                                        # Get the first topic ARN and store it in topic
                                        topic = topics[u'ListTopicsResponse']['ListTopicsResult']['Topics'][0]['TopicArn']
                                        # Set up email message body to contain event information
                                        msg = "Event details - Source IP: " + str(item["sourceIPAddress"]) + " : Account ID: " + str(item["userIdentity"]["accountId"]) + " : Event type: " + str(item["eventName"]) + " : Event time: " + str(item["eventTime"])
                                        # Set up email message subject
                                        subject = "Unauthorised account access"
                                        # Publish this message to SNS
                                        sns.publish(topic, msg, subject)
                                        # Subscribe to the alarm using the specified email address and the email protocol
                                        sns.subscribe(topic, "email", emailAddr)
                                        # Add ID to ignored list to continue checking for other unauthorised accounts
                                        ignoredId.append(item['userIdentity']['accountId'])
                                        break
                                    elif input in ('no', 'n', '0'):
                                        break
                                    else:
                                        print "Invalid command"
                                else:
                                    print "Invalid command"
    print "\n\nCheck complete\n"
#!/usr/bin/env python
"""Script to upload packages to S3 and notify repoupdate-daemon."""
import os
import optparse

import boto
import boto.sns

parser = optparse.OptionParser()
parser.add_option('--bucket', default='packages.example.com')
parser.add_option('--repopath', default='development/x86_64')
parser.add_option('--region', default='us-east-1')
parser.add_option('--sns-topic', default='arn:aws:sns:us-east-1:123:packages-new')
options, args = parser.parse_args()

sns = boto.sns.connect_to_region(options.region)
bucket = boto.connect_s3().get_bucket(options.bucket)

for rpmfile in args:
    filename = os.path.split(rpmfile)[1]
    key = bucket.new_key(os.path.join(options.repopath, filename))
    key.set_contents_from_filename(rpmfile)
    sns.publish(options.sns_topic, filename, options.repopath)
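# Note (hedged): boto 2's SNSConnection.publish takes (topic, message, subject) as
# its leading parameters, so the positional call in the script above publishes the
# RPM filename as the message body and the repo path as the subject. A small,
# self-contained sketch of the same call with explicit keywords; the defaults below
# simply mirror the script's own option defaults and are illustrative.
import boto.sns

def notify_repoupdate(region='us-east-1',
                      topic='arn:aws:sns:us-east-1:123:packages-new',
                      filename='mypackage-1.0-1.x86_64.rpm',
                      repopath='development/x86_64'):
    sns = boto.sns.connect_to_region(region)
    sns.publish(topic=topic, message=filename, subject=repopath)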
# Fragment: runs once per VPN tunnel id `v`. The first block sits inside the
# (not shown) branch handling a tunnel reported as down; the trailing `else:`
# is the corresponding "tunnel is up" branch.
    if not os.path.exists(str(v)):
        # First failure: create a state file holding an unhealthy count of 1
        file = open(str(v), "w")
        unhealthy = 1
        file.write(str(unhealthy))
        file.close()
    else:
        # Subsequent failure: increment the stored unhealthy count
        file = open(str(v), "r+")
        unhealthy = int(file.read()) + 1
        file.seek(0)
        file.write(str(unhealthy))
        file.close()
    if unhealthy >= UNHEALTHY_THRESH:
        msg = msg + "{} is down. Unhealthy {} >= {}".format(v, unhealthy, UNHEALTHY_THRESH)
        if v not in IGNORE_VPN:
            # Alert on the first breach of the threshold, then on every 10th check
            if unhealthy == UNHEALTHY_THRESH or unhealthy % 10 == 0:
                print "ALERT: Tunnel is down. SNS sent to {}".format(args.sns)
                sns.publish(args.sns, message=msg,
                            subject="AWS ALERT! VPN [{}] is Down! {} >= {}".format(
                                v, unhealthy, UNHEALTHY_THRESH))
else:
    # Tunnel is up again: clear the state file and notify if it had been alerting
    if os.path.exists(v):
        file = open(str(v), "r")
        unhealthy = int(file.read())
        file.close()
        if unhealthy < UNHEALTHY_THRESH:
            os.remove(v)
        else:
            msg = msg + "{} is back up.".format(v)
            print "ALERT: Tunnel is back up. SNS sent to {}".format(args.sns)
            sns.publish(args.sns, message=msg,
                        subject="AWS VPN STATUS: VPN [{}] is back up.".format(v))
            os.remove(v)
# Clean up old snapshots
try:
    if not dry_run:
        count_deletes += clean_snapshots()  # Do it, and add deletes to global counter
except Exception as e:
    errmsg = True
    logging.error("%s/%s: Error cleaning old snapshots for volume: %s",
                  instance.id, volume.id, e)
    count_errors += 1

# Finish up the log file...
logging.info("Finished processing snapshots")
logging.info("Volumes processed: %s", str(count_processed))
logging.info("Volumes ignored: %s", str(count_ignores))
logging.info("Volumes skipped (frequency): %s", str(count_skips))
logging.info("Volumes skipped (missing Tag): %s", str(count_skips_tag))
logging.info("Snapshots created: %s", str(count_creates))
logging.info("Snapshots deleted: %s", str(count_deletes))
logging.info("Errors: %s", str(count_errors))

# Report outcome to SNS (if configured AND not dry run)
# Only send SNS when: 1. has error 2. create or delete snapshot
if sns_arn and not dry_run:
    snsConsole.flush()
    if errmsg:
        sns.publish(sns_arn, snsStream.getvalue(), 'Error with AWS Snapshot')
    elif (count_creates + count_deletes) > 0:
        sns.publish(sns_arn, snsStream.getvalue(), 'Finished AWS snapshotting')
# Fragment: inside the except handler for an "endpoint already exists" error on
# the sandbox token variant; the enclosing try and loop are not shown.
    print 'ERR 2: ' + err.message

    # Yes, this is actually the official way:
    # http://stackoverflow.com/questions/22227262/aws-boto-sns-get-endpoint-arn-by-device-token
    result_re = re.compile(r'Endpoint(.*)already', re.IGNORECASE)
    result = result_re.search(err.message)

    if result:
        endpoint_arn = result.group(0).replace('Endpoint ', '').replace(' already', '')
    else:
        raise

try:
    sns.publish(target_arn=endpoint_arn, message_structure='json', message=json.dumps(message))
    # print('PUBLISHED DEV: ' + token)
except boto.exception.BotoServerError, err:
    print 'FAILED SENDING 2 TO ' + token
    print 'ERR: ' + err.message

    result_re = re.compile(r'Endpoint(.*)disabled', re.IGNORECASE)
    result = result_re.search(err.message)

    if result:
        for point in DataPoint.objects.filter(source=source,
                                              generator_identifier='pdk-app-event',
                                              secondary_identifier='pdk-ios-device-token-sandbox').order_by('created'):
# Fragment: the opening of the SNS connection constructor call is not shown.
    region=region,
)

try:
    endpoint_response = sns.create_platform_endpoint(
        platform_application_arn='arn:aws:sns:eu-west-1:123456879:app/APNS_SANDBOX/Myapp_Dev',
        token=device_id,
    )
    endpoint_arn = endpoint_response['CreatePlatformEndpointResponse']['CreatePlatformEndpointResult']['EndpointArn']
except boto.exception.BotoServerError, err:
    # Yes, this is actually the official way:
    # http://stackoverflow.com/questions/22227262/aws-boto-sns-get-endpoint-arn-by-device-token
    result_re = re.compile(r'Endpoint(.*)already', re.IGNORECASE)
    result = result_re.search(err.message)
    if result:
        endpoint_arn = result.group(0).replace('Endpoint ', '').replace(' already', '')
    else:
        raise

print "ARN:", endpoint_arn

publish_result = sns.publish(
    target_arn=endpoint_arn,
    message=body,
)
print "PUBLISH"
pprint.pprint(publish_result)
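# To make the "parse the ARN out of the error message" workaround above concrete,
# a small hedged illustration; the error text and ARN below are made up but follow
# the usual "Endpoint <arn> already exists" wording that the regex targets.
import re

err_message = ("Invalid parameter: Token Reason: Endpoint "
               "arn:aws:sns:eu-west-1:123456879:endpoint/APNS_SANDBOX/Myapp_Dev/0000-0000 "
               "already exists with the same Token, but different attributes.")

result = re.compile(r'Endpoint(.*)already', re.IGNORECASE).search(err_message)
endpoint_arn = result.group(0).replace('Endpoint ', '').replace(' already', '')
print(endpoint_arn)  # arn:aws:sns:eu-west-1:123456879:endpoint/APNS_SANDBOX/Myapp_Dev/0000-0000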
# Clean up old snapshots
try:
    if get_config('dry_run') is None:
        count_deletes += clean_snapshots()  # Do it, and add deletes to global counter
except Exception as e:
    errmsg = True
    logging.error("%s/%s: Error cleaning old snapshots for volume: %s",
                  instance.id, volume.id, e)
    count_errors += 1

# Finish up the log file...
logging.info("Finished processing snapshots")
logging.info("Volumes processed: %s", str(count_processed))
logging.info("Volumes ignored: %s", str(count_ignores))
logging.info("Volumes skipped: %s", str(count_skips))
logging.info("Snapshots created: %s", str(count_creates))
logging.info("Snapshots deleted: %s", str(count_deletes))
logging.info("Errors: %s", str(count_errors))

# Report outcome to SNS (if configured AND not dry run)
# Only send SNS when: 1. has error 2. create or delete snapshot
if sns_arn and get_config('dry_run') is None:
    snsConsole.flush()
    if errmsg:
        sns.publish(sns_arn, snsStream.getvalue(), 'Error with AWS Snapshot')
    elif (count_creates + count_deletes) > 0:
        sns.publish(sns_arn, snsStream.getvalue(), 'Finished AWS snapshotting')
def twilio_response(request):
    global clients, firstTimeclients
    sns = boto.sns.SNSConnection("AKIAIMRF4NLL75DXLOWA",
                                 "DtFZ9z5IAitkvMkMty8sy/KQ+1j5qmuCj9Lrow4Q")
    text = re.sub(r'\W+', ' ', request.params['Body'])
    text_body = text.strip()
    text_from = request.params['From']
    body = text_body.split()
    containsNum = num_finder(text_body)
    try:
        if containsNum:
            if body[0].upper() == "REMOVE":
                subject = "+1" + str(body[1])
                clients.pop("+1" + str(body[1]))
                firstTimeclients.remove(subject)
                message2 = subject + " request has been fulfilled."
                message = message2
                sns.publish("arn:aws:sns:us-east-1:820374392987:Wifi_help", message2,
                            "Wifi problem detected by " + subject)
            elif text_from not in firstTimeclients:
                firstTimeclients += [text_from]
                message2 = "Name: " + str(body[0]) + "\nLocation: " + list_str(body[1:])
                message = "Your message was received. If you need to change your room, text your new room number and building. E.g. '325 Soda' To cancel, text 'undo'"
                sns.publish("arn:aws:sns:us-east-1:820374392987:Wifi_help", message2,
                            "Wifi problem detected by " + text_from)
                clients[text_from] = text_body
            else:
                message = "The EECS Helpdesk has received your request and will send help to your updated room."
                message2 = str(clients[text_from].split()[0]) + " has changed location. \nNew location: " + text_body
                sns.publish("arn:aws:sns:us-east-1:820374392987:Wifi_help", message2,
                            "Wifi problem detected by " + text_from)
                clients[text_from] = str(clients[text_from].split()[0]) + " " + text_body
        elif body[0].upper() == "WIFI":
            message = "The EECS Helpdesk got your message. Please respond with your first name, the room #, and building name on the poster. E.g. 'Joe 326 Soda'"
            message2 = "A Wifi problem was detected, awaiting response."
            sns.publish("arn:aws:sns:us-east-1:820374392987:Wifi_help", message2,
                        "Wifi problem detected by " + text_from)
        elif body[0].upper() == "RETRIEVE":
            hall_request = body[1].upper()
            message = ""
            message2 = "Current requests for " + hall_request + ":\n"
            for user in clients:
                if hall_request in clients[user].upper():
                    message = message + clients[user] + " "
                    message2 = message2 + clients[user] + "\n"
            if message == "":
                message = "none"
            sns.publish("arn:aws:sns:us-east-1:820374392987:Wifi_help", message2,
                        "Help requests in " + hall_request)
        elif body[0].upper() == "UNDO":
            clients.pop(text_from)
            firstTimeclients.remove(text_from)
            message = "Your request has been received and cancelled"
            message2 = text_from + " cancelled request for assistance"
            sns.publish("arn:aws:sns:us-east-1:820374392987:Wifi_help", message2,
                        "Wifi problem detected by " + text_from)
        else:
            message = "We are sorry, but we do not recognize this request"
    except:
        message = "We are sorry, but there was a problem with your request"
    resp = twilio.twiml.Response()
    resp.sms(message)
    return str(resp)
def done(request):
    sns = boto.sns.SNSConnection("AKIAIMRF4NLL75DXLOWA",
                                 "DtFZ9z5IAitkvMkMty8sy/KQ+1j5qmuCj9Lrow4Q")
    sns.publish("arn:aws:sns:us-east-1:820374392987:Wifi_help",
                "A user has requested help from the mobile help desk.")
    return render_to_response('done.html')
#!/usr/bin/env python
"""Script to upload packages to S3 and notify repoupdate-daemon."""
import os
import optparse

import boto
import boto.sns

parser = optparse.OptionParser()
parser.add_option('--bucket', default='packages.example.com')
parser.add_option('--repopath', default='development/x86_64')
parser.add_option('--region', default='us-east-1')
parser.add_option('--sns-topic', default='arn:aws:sns:us-east-1:123:packages-new')
options, args = parser.parse_args()

sns = boto.sns.connect_to_region(options.region)
bucket = boto.connect_s3().get_bucket(options.bucket, validate=False)

for rpmfile in args:
    filename = os.path.split(rpmfile)[1]
    key = bucket.new_key(os.path.join(options.repopath, filename))
    key.set_contents_from_filename(rpmfile)
    sns.publish(options.sns_topic, filename, options.repopath)
# Fragment: tail of the per-volume snapshot loop, followed by summary reporting.
        count_success += 1

result = 'Finished taking snapshots at %(timestamp)s with %(count_success)s snapshots out of %(count_total)s possible.\n' % {
    'timestamp': datetime.datetime.utcnow().strftime('%d-%m-%Y %H:%M:%S'),
    'count_success': count_success,
    'count_total': count_total
}
result += "Total snapshots created: %d\n" % total_created
result += "Total snapshots deleted: %d\n" % total_deleted
result += "Total snapshot errors: %d\n" % count_errors
sns_msg += result

# Not finding any volumes is considered an error
if not vols:
    msg = u'No volumes found'
    logger.error(msg)
    sns_err_msg += msg

# SNS reporting
if sns_arn:
    if sns_err_msg:
        sns_err_msg = 'Some of the volumes could not be processed. See the logs for more detailed info.\n\n' + sns_err_msg
        sns.publish(sns_arn, sns_err_msg, 'Error with AWS Snapshot')
    sns.publish(sns_arn, sns_msg, 'Finished taking AWS snapshots')

logger.info(result)

if sns_err_msg:
    sys.exit(1)
# Fragment: except/else clauses of a per-volume try block (the try body is not
# shown), followed by summary reporting.
    except:
        print "Unexpected error:", sys.exc_info()[0]
        logging.error('Error in processing volume with id: ' + vol.id)
        errmsg += 'Error in processing volume with id: ' + vol.id
        count_errors += 1
    else:
        count_success += 1

result = '\nFinished making snapshots at %(date)s with %(count_success)s snapshots of %(count_total)s possible.\n\n' % {
    'date': datetime.today().strftime('%d-%m-%Y %H:%M:%S'),
    'count_success': count_success,
    'count_total': count_total
}

message += result
message += "\nTotal snapshots created: " + str(total_creates)
message += "\nTotal snapshot errors: " + str(count_errors)
message += "\nTotal snapshots deleted: " + str(total_deletes) + "\n"

print '\n' + message + '\n'
print result

# SNS reporting
if sns_arn:
    if errmsg:
        sns.publish(sns_arn, 'Error in processing volumes: ' + errmsg, 'Error with AWS Snapshot')
    sns.publish(sns_arn, message, 'Finished AWS snapshotting')

logging.info(result)
# Fragment: the opening of sns.create_platform_endpoint(...) and its enclosing
# try are not shown.
        'arn:aws:sns:eu-central-1:727045919079:app/APNS_SANDBOX/Hoo',
        token=device_id,
    )
    endpoint_arn = endpoint_response['CreatePlatformEndpointResponse']['CreatePlatformEndpointResult']['EndpointArn']
except boto.exception.BotoServerError, err:
    # Yes, this is actually the official way:
    # http://stackoverflow.com/questions/22227262/aws-boto-sns-get-endpoint-arn-by-device-token
    result_re = re.compile(r'Endpoint(.*)already', re.IGNORECASE)
    result = result_re.search(err.message)
    if result:
        endpoint_arn = result.group(0).replace('Endpoint ', '').replace(' already', '')
    else:
        raise

print "ARN:", endpoint_arn

body = {'aps': {'alert': body, 'sound': 'default'}}
body_json = json.dumps(body, ensure_ascii=False)
message = {'default': 'The default message', 'APNS_SANDBOX': body_json}
MESSAGE_JSON = json.dumps(message, ensure_ascii=False)

publish_result = sns.publish(
    target_arn=endpoint_arn,
    message=MESSAGE_JSON,
    message_structure='json',
)
print "PUBLISH"
pprint.pprint(publish_result)
alchemyapi = AlchemyAPI()

while True:
    try:
        my_queue = conn.get_queue('myqueue6')
        rs = my_queue.get_messages()
        m = rs[0]
        text = ast.literal_eval(m.get_body())
        text1 = text["text"]
        text2 = text["coordinates"]
        print text1
        print text2
        res = alchemyapi.sentiment("text", text1, {'sentiment': 1})
        sentiment = res["docSentiment"]["type"]
        print sentiment
        subj = "SNS message over boto"
        msg = "sentiment analysis is done."
        response = sns.publish(topic=mytopic_arn,
                               message=text1,
                               subject=str(text2[0]) + "," + str(text2[1]) + "," + str(sentiment))
        my_queue.delete_message(m)
    except:
        pass
# Fragment: inside a loop over DynamoDB device records; the enclosing
# `if` tests for a matching user ID and shouldNotify are not shown.
            apns_dict = {'aps': {'alert': out,
                                 'sound': soundName,
                                 'badge': badgeCount,
                                 'b_t': notificationType,
                                 'b_uID': messageAuthorID,
                                 'b_url': blixt_url}}

            # Send the push notification via SNS
            # pprint('Pushing notification to ddbuserID: ' + ddbUserID)
            apns_string = json.dumps(apns_dict, ensure_ascii=True)
            if useDebug:
                message = {'default': 'default message', 'APNS_SANDBOX': apns_string}
            else:
                message = {'default': 'default message', 'APNS': apns_string}
            messageJSON = json.dumps(message, ensure_ascii=False)
            pprint(messageJSON)
            pprint('device arn: ' + device_arn)
            results = sns.publish(message=messageJSON, target_arn=device_arn,
                                  message_structure='json')

            # Update the badge count in DynamoDB
            device['bxt_badgeCount'] = badgeCount
            device.save()
            pprint('push notification complete without crashing')
        else:
            pprint('shouldNotify was NO')
    else:
        pprint('UserID from DDB: ' + ddbUserID + ' Does not match userIDToNotify: ' + userIDToNotify)

    # Increment the index for the for loop
    i += 1
else: