def get_sns_alert_function(self):
    """Return a callable that publishes an alert to this object's SNS topic.

    Returns None when no SNS topic ARN is configured; otherwise returns a
    function of (message, subject) that publishes through a fresh
    SNSConnection to self.sns_topic_arn.
    """
    if self.sns_topic_arn is None:
        return None

    def publish_alert(message, subject):
        return SNSConnection().publish(self.sns_topic_arn, message, subject)

    return publish_alert
Example #2
0
def connect_sns(aws_access_key_id=None, aws_secret_access_key=None, **kwargs):
    """Create a connection to Amazon's Simple Notification Service.

    :type aws_access_key_id: string
    :param aws_access_key_id: Your AWS Access Key ID

    :type aws_secret_access_key: string
    :param aws_secret_access_key: Your AWS Secret Access Key

    :rtype: :class:`boto.sns.SNSConnection`
    :return: A connection to Amazon's SNS
    """
    # Imported lazily so merely importing this module does not require boto.
    from boto.sns import SNSConnection
    return SNSConnection(
        aws_access_key_id=aws_access_key_id,
        aws_secret_access_key=aws_secret_access_key,
        **kwargs)
Example #3
0
    def updateActions(self, actions):
        '''Update the actions on this account based on the supplied actions.

        `actions` should be a dictionary mapping Amazon Simple Notification
        Service topic names to their associated subscriptions: each value is
        a list of dicts with an 'endpoint' key and an optional 'protocol'
        key (defaulting to 'email').

        Raises EmitterException when topic creation returns an unexpected
        response shape, or when a topic has no subscriptions configured.'''
        # First, we need an SNS connection to make these changes
        conn = SNSConnection(**self.kwargs)
        # Now make sure each subscription is registered to the topic
        for name, subscriptions in actions.items():
            logger.info('Creating topic %s' % name)
            # Try to make a topic; the ARN lives deep inside the response,
            # so a missing key anywhere means the response was malformed.
            try:
                arn = conn.create_topic(name)['CreateTopicResponse'][
                    'CreateTopicResult']['TopicArn']
                self.actions[name] = arn
            except KeyError:
                raise EmitterException('Bad response creating topic %s' % name)

            if len(subscriptions) == 0:
                raise EmitterException('No subscriptions for action %s' % name)
            # Now try to arrange for subscriptions
            # Oddly enough, calling create_topic doesn't have any effect
            # if the topic already exists, but calling subscribe() for an
            # existing subscription causes a second subscription to be added
            # So, we have to get a list of current subscriptions, and then
            # make sure to only add the subscription if it's not already there
            logger.info('Getting a list of current subscriptions...')
            # Walk the nested response structure down to the raw list of
            # subscription records, then reduce to a set of endpoints.
            current = conn.get_all_subscriptions_by_topic(arn)
            current = current['ListSubscriptionsByTopicResponse']
            current = current['ListSubscriptionsByTopicResult']
            current = current['Subscriptions']
            current = set(s['Endpoint'] for s in current)
            # For all desired subscriptions not present, subscribe
            for s in subscriptions:
                if s['endpoint'] not in current:
                    logger.info('Adding %s to action %s' %
                                (s['endpoint'], name))
                    conn.subscribe(arn, s.get('protocol', 'email'),
                                   s['endpoint'])
                else:
                    logger.info('%s already subscribed to action' %
                                s['endpoint'])
            # Check for subscriptions that are active, but not listed...
            # (warn only; we never unsubscribe automatically)
            activeUnlisted = set(current) - set(
                [s['endpoint'] for s in subscriptions])
            for s in activeUnlisted:
                logger.warn('Subscript "%s" active, but not listed in config' %
                            s)
Example #4
0
elif node_env == 'staging':
  key_prefix = 'efins_dbbackups/staging'
else:
  node_env = node_env if node_env else '-none-'
  print("NODE_ENV '" + node_env + "' is not valid.  Try setting it to one of {production, staging, development}.")
  exit(-1) 


# Get a SNS connection so we can report on our status
regions = boto.sns.regions()
myRegion = ''
for region in regions:
  print region
  if region.name == 'us-west-2':
    myRegion = region
sns = SNSConnection('AKIAIBYUXG6UOLBFSRKA', 'FvFbsW2C9rS9ayA1AHvHmBqL07iU+oz5X803xdot', region=myRegion)


# Take the DB backup
db_suffix = "_" + node_env if node_env in ["development", "staging"] else ""
db_name = "efins" + db_suffix
print("Starting the backup of " + db_name)
now = datetime.datetime.now()
backup_name = "%s_%s_%s_%s_%s_%s_%s" % (now.month, now.day, now.year, now.hour, now.minute, now.second, os.getenv('USER'))
print backup_name
backup_path = "/tmp/%s" % backup_name
print("Dumping to %s" % backup_path)


orig_dir = os.getcwd()
os.chdir('/tmp')
Example #5
0
def dependency_check():
    """Block until all dependency pipelines are done, then report failures.

    Parses command-line arguments, maps dependency pipeline names to ids via
    list_pipelines(), then polls check_dependencies_ready() every
    --refresh_rate seconds until it reports ready. Any accumulated failures
    are published to --sns_topic_arn; if failures occurred but no ARN was
    supplied, an Exception is raised instead.

    Arguments (argparse):
      --dependencies: names of pipelines to wait on.
      --dependencies_ok_to_fail: additional pipelines to wait on whose
        failures are presumably tolerated by check_dependencies_ready --
        confirm against that helper.
      --pipeline_name: name of this pipeline, used in the alert subject.
      --refresh_rate: polling interval in seconds (default '900').
      --start_date: passed through to check_dependencies_ready.
      --sns_topic_arn: SNS topic ARN for failure notification.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--dependencies', type=str, nargs='+', default=[])
    parser.add_argument('--dependencies_ok_to_fail',
                        type=str,
                        nargs='+',
                        default=[])
    parser.add_argument('--pipeline_name', dest='pipeline_name')
    parser.add_argument('--refresh_rate', dest='refresh_rate', default='900')
    parser.add_argument('--start_date', dest='start_date')
    parser.add_argument('--sns_topic_arn', dest="sns_topic_arn")

    args = parser.parse_args()

    # Exit if there are no dependencies
    if not args.dependencies and not args.dependencies_ok_to_fail:
        sys.exit()

    # Create mapping from pipeline name to id
    pipeline_name_to_id = dict(
        (pipeline['name'], pipeline['id']) for pipeline in list_pipelines())

    # Remove whitespace from dependency lists
    # NOTE: Python 2 map() returns a list here, so extend() below works.
    dependencies = map(str.strip, args.dependencies)
    dependencies_to_ignore = map(str.strip, args.dependencies_ok_to_fail)

    # Add the dependencies which can fail to the list of dependencies
    dependencies.extend(dependencies_to_ignore)

    # Check if all dependencies are valid pipelines
    for dependency in dependencies:
        if dependency not in pipeline_name_to_id:
            raise Exception('Pipeline not found: %s.' % dependency)

    # Map from dependency id to name
    dependencies = {pipeline_name_to_id[dep]: dep for dep in dependencies}

    print 'Start checking for dependencies'
    start_time = datetime.now()

    failures = []
    dependencies_ready = False

    # Loop until all dependent pipelines have finished or failed,
    # accumulating failures across polls.
    while not dependencies_ready:
        print 'checking'
        dependencies_ready, new_failures = check_dependencies_ready(
            dependencies, args.start_date, dependencies_to_ignore)
        failures.extend(new_failures)
        if not dependencies_ready:
            time.sleep(float(args.refresh_rate))

    # Send message through SNS if there are failures
    if failures:
        if args.sns_topic_arn:
            message = 'Failed dependencies: %s.' % ', '.join(set(failures))
            subject = 'Dependency error for pipeline: %s.' % args.pipeline_name
            SNSConnection().publish(args.sns_topic_arn, message, subject)
        else:
            raise Exception('ARN for SNS topic not specified in ETL config')

    print 'Finished checking for dependencies. Total time spent: ',
    print(datetime.now() - start_time).total_seconds(), ' seconds'
# Required configuration comes from the process environment; a missing
# variable fails fast here with a KeyError.
evaluationId = environ['EVALUATION_ID']
numerateBucketAccessEmailAddress= environ['NUMERATE_BUCKET_ACCESS_EMAIL_ADDRESS']
synapseUserId = environ['SYNAPSE_USER_ID']
synapseUserPw = environ['SYNAPSE_USER_PW']
snsTopic = environ['SNS_TOPIC']
# we allow multiple AWS accounts.  this gets us past the 100 bucket per account limit
synapseAccessKeyProjectId=environ["SYNAPSE_ACCESS_KEY_PROJECT_ID"]

s3Connections = []
iamConnections = []
i=0
# Collect one S3/IAM connection per numbered credential pair in the
# environment (AWS_ACCESS_KEY_ID_1 / AWS_SECRET_ACCESS_KEY_1, _2, ...),
# stopping at the first missing pair.
while True:
    aws_access_key_id = environ.get("AWS_ACCESS_KEY_ID_"+str(i+1))
    aws_secret_access_key = environ.get("AWS_SECRET_ACCESS_KEY_"+str(i+1))
    if (i==0):
        # SNS notifications always go through the first account's credentials.
        snsConnection = SNSConnection(aws_access_key_id=aws_access_key_id, aws_secret_access_key=aws_secret_access_key)

    if ((aws_access_key_id is None) or (aws_secret_access_key is None)):
        break
    else:
        s3Connections.append(S3Connection(aws_access_key_id, aws_secret_access_key))
        iamConnections.append(IAMConnection(aws_access_key_id=aws_access_key_id, aws_secret_access_key=aws_secret_access_key))
        i=i+1

if (len(s3Connections)==0):
    # BUG FIX: the original was `raise("No AWS crdentials provided")`, which
    # raises a string -- that is a TypeError (exceptions must derive from
    # BaseException), so the intended message was never shown. Raise a real
    # exception with the typo corrected.
    raise RuntimeError("No AWS credentials provided")

# Cap on generated user-name length -- presumably an IAM/Synapse naming
# limit; confirm where this constant is consumed.
MAXIMUM_USER_NAME_LENGTH = 63

## connect to Synapse
syn = Synapse()