def publish_msg_to_sns_topic(region, aws_access_key, aws_secret_key, topic, message, subject):
    """Publish a message to an SNS topic in the given region.

    :param region: AWS region name the topic lives in
    :param aws_access_key: AWS access key id
    :param aws_secret_key: AWS secret access key
    :param topic: ARN of the topic to publish to
    :param message: message body
    :param subject: message subject line
    """
    # Bug fix: the original called connect_to_region(region) and discarded
    # the result, then built an SNSConnection with no region argument, so
    # every publish went to the default endpoint regardless of `region`.
    # Connect to the requested region with the supplied credentials instead.
    conn = connect_to_region(
        region,
        aws_access_key_id=aws_access_key,
        aws_secret_access_key=aws_secret_key)
    conn.publish(topic, message, subject)
def __get_connection_SNS():
    """ Ensure connection to SNS """
    region = get_global_option('region')
    access_key = get_global_option('aws_access_key_id')
    secret_key = get_global_option('aws_secret_access_key')
    try:
        if access_key and secret_key:
            # Explicit credentials were supplied in the configuration file.
            logger.debug(
                'Authenticating to SNS using '
                'credentials in configuration file')
            connection = sns.connect_to_region(
                region,
                aws_access_key_id=access_key,
                aws_secret_access_key=secret_key)
        else:
            # Fall back to boto's own credential resolution chain.
            logger.debug(
                'Authenticating using boto\'s authentication handler')
            connection = sns.connect_to_region(region)
    except Exception as err:
        logger.error('Failed connecting to SNS: {0}'.format(err))
        logger.error(
            'Please report an issue at: '
            'https://github.com/sebdah/dynamic-dynamodb/issues')
        raise
    logger.debug('Connected to SNS in {0}'.format(region))
    return connection
def __get_connection_SNS():
    """ Ensure connection to SNS """
    try:
        key_id = get_global_option("aws_access_key_id")
        secret = get_global_option("aws_secret_access_key")
        if key_id and secret:
            # Explicit credentials from the configuration file win.
            logger.debug(
                "Authenticating to SNS using credentials in configuration file")
            connection = sns.connect_to_region(
                get_global_option("region"),
                aws_access_key_id=key_id,
                aws_secret_access_key=secret,
            )
        else:
            try:
                # Try the EC2 instance profile; the region is the availability
                # zone from the metadata service minus its trailing letter.
                logger.debug("Authenticating to SNS using EC2 instance profile")
                metadata = get_instance_metadata(timeout=1, num_retries=1)
                zone = metadata["placement"]["availability-zone"]
                connection = sns.connect_to_region(
                    zone[:-1],
                    profile_name=metadata["iam"]["info"][u"InstanceProfileArn"],
                )
            except KeyError:
                # Not on EC2 (or metadata missing): let boto resolve it.
                logger.debug(
                    "Authenticating to SNS using env vars / boto configuration")
                connection = sns.connect_to_region(get_global_option("region"))
    except Exception as err:
        logger.error("Failed connecting to SNS: {0}".format(err))
        logger.error(
            "Please report an issue at: "
            "https://github.com/sebdah/dynamic-dynamodb/issues")
        raise
    logger.debug("Connected to SNS in {0}".format(get_global_option("region")))
    return connection
def __get_connection_SNS():
    """ Ensure connection to SNS """
    region = get_global_option('region')
    try:
        has_keys = (get_global_option('aws_access_key_id') and
                    get_global_option('aws_secret_access_key'))
        if has_keys:
            # Use the keys from the configuration file.
            logger.debug('Authenticating to SNS using '
                         'credentials in configuration file')
            connection = sns.connect_to_region(
                region,
                aws_access_key_id=get_global_option('aws_access_key_id'),
                aws_secret_access_key=get_global_option(
                    'aws_secret_access_key'))
        else:
            # Defer to boto's own credential lookup.
            logger.debug('Authenticating using boto\'s authentication handler')
            connection = sns.connect_to_region(region)
    except Exception as err:
        logger.error('Failed connecting to SNS: {0}'.format(err))
        logger.error('Please report an issue at: '
                     'https://github.com/sebdah/dynamic-dynamodb/issues')
        raise
    logger.debug('Connected to SNS in {0}'.format(region))
    return connection
def __init__(self, environment, deployment, region, zone, template=template):
    """Wire up the AWS connections for this lab and prepare its CFN template."""
    # Connections to the AWS services this lab talks to
    self.cfn_connection = cfn.connect_to_region(region)
    self.sns_connection = sns.connect_to_region(region)
    self.vpc_connection = vpc.connect_to_region(region)
    self.iam_connection = iam.connect_to_region("universal")

    # Temporary python class -> directory name hack
    self.lab_dir = self.__class__.__name__.lower()
    name_parts = [self.lab_dir, environment, deployment, region, zone]
    self.stack_name = "-".join(name_parts)

    if environment != '':
        topic_name = "cloudformation-notifications-" + environment
        self.notification_arns = self.get_sns_topic(topic_name)

    self.parameters = []

    # Prepare the CFN template
    base_dir = os.path.dirname(os.path.realpath(__file__))
    self.template_url = "/".join([base_dir, self.lab_dir, vpc_provider, template])
    self.template_body = self.read_file(self.template_url, max_template_size)
    self.validate_template()
def send_contact_email(self, contact):
    """Publish a website contact-form submission to the SNS contact topic.

    :param contact: object exposing name/company/phone/email/message attributes
    """
    # SECURITY(review): AWS credentials are hard-coded here. They should be
    # considered compromised, rotated, and loaded from configuration or
    # environment variables instead of source code.
    sns_conn = sns.connect_to_region(
        'us-west-2',
        aws_access_key_id='AKIAJXN4QBOQO6TR7T5Q',
        aws_secret_access_key='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'
    )
    # Plain-text body assembled from the form fields.
    msg = """
    Name: {name}
    Company: {company}
    Phone: {phone}
    Email: {email}
    Message: {message}
    """.format(
        name=contact.name,
        company=contact.company,
        phone=contact.phone,
        email=contact.email,
        message=contact.message
    )
    sns_conn.publish(
        u'arn:aws:sns:us-west-2:451326512542:ArchimedesCustomerContactTopic',
        msg,
        '[New Message] Archimedes Controls website received new message',
    )
def alert(self, matches):
    """Render the matches and publish them to the configured SNS topic."""
    sections = []
    for match in matches:
        sections.append(str(BasicMatchString(self.rule, match)))
        # Separate text of aggregated alerts with dashes
        if len(matches) > 1:
            sections.append('\n----------------------------------------\n')
    body = ''.join(sections)

    # Explicit keys take precedence; otherwise rely on the instance role.
    if self.aws_access_key or self.aws_secret_key:
        sns_client = sns.connect_to_region(
            self.aws_region,
            aws_access_key_id=self.aws_access_key,
            aws_secret_access_key=self.aws_secret_key)
    else:
        sns_client = sns.connect_to_region(self.aws_region)

    sns_client.publish(self.sns_topic_arn, body, subject=self.create_default_title())
    elastalert_logger.info("Sent sns notification to %s" % (self.sns_topic_arn))
def send_push_notifications(body, org_id, last_editor_id):
    """Sends SNS push notifications to an org's devices"""
    from core.models import TweetCheckUser, Device
    from twitter.models import Tweet

    editor = TweetCheckUser.objects.get(pk=last_editor_id)
    # Everyone in the org except the person who made the edit.
    device_arns = (Device.objects
                   .filter(user__organization__id=org_id)
                   .exclude(user=editor)
                   .values_list('arn', flat=True))

    connection = sns.connect_to_region('us-east-1')

    # Truncate long bodies at a word boundary and add an ellipsis.
    if len(body) > 35:
        body = body[:35].rsplit(' ', 1)[0]+'...'

    alert = '{0} added a new tweet for review: "{1}"'.format(editor.get_short_name(), body)
    payload = {
        'aps': {
            'alert': alert,
            'badge': Tweet.get_pending_count(org_id),
            'sound': 'default'
        }
    }
    sns_request = {settings.APNS_ARN: json.dumps(payload)}
    for device_arn in device_arns:
        connection.publish(message=json.dumps(sns_request), message_structure='json', target_arn=device_arn)
def new_environment(options):
    """Build an Environment backed by AWS (SNS, S3, EC2) and Consul."""
    region = options.region
    ec2_conn = ec2.connect_to_region(region)
    s3_conn = s3.connect_to_region(region)
    sns_conn = sns.connect_to_region(region)
    consul_conn = Consul(options.host, options.port)
    service = EnvironmentService(ec2_conn, s3_conn, sns_conn, consul_conn)
    return Environment(service, options)
def cw_alarm(self, cw_conn, instance_id, email_address): """ Setup a CW alarm to send a notification - Assume you have CW enabled, you want to be notified when certain conditions arise. This make use of the Simple Notification Service (SNS) to send an email of CW events using alarms :param instance: Instance id which the alarm will be activated in :param email: Email address to notify the user when the alarm is raised """ # SNS instance sns = connect_to_region('eu-west-1') # Creating new topic in SNS topic = sns.create_topic('cpu_alarm') # Get topic ARN topic_arn = topic['CreateTopicResponse']['CreateTopicResult']['TopicArn'] # Subscribing provided email to notify when alarm is raised try: sns.subscribe(topic_arn, 'email', email_address) except boto.exception.BotoServerError: print "Invalid email address" return # Select CPU utilisation metric from provided instance id try: metric = cw_conn.list_metrics(dimensions={'InstanceId': instance_id}, metric_name="CPUUtilization")[0] except IndexError: print "Instance id", instance_id, "does not exist" return # Create alarm and set topic action # Alarm will trigger if 2 checks fail after 5 min, so if it was 10 min under 40% CPU utilisation metric.create_alarm(name='CPU alarm', comparison='<', threshold=40, period=300, evaluation_periods=2, statistic='Average', alarm_actions=[topic_arn], unit='Percent') print 'Alarm for instance', instance_id, 'created'
def save(self, *args, **kwargs):
    """On first save, register the device token as an SNS platform endpoint."""
    if not self.pk:
        connection = sns.connect_to_region('us-east-1')
        result = connection.create_platform_endpoint(settings.APNS_ARN, self.token)
        # Unwrap the nested response to get the endpoint ARN.
        self.arn = (result['CreatePlatformEndpointResponse']
                    ['CreatePlatformEndpointResult']['EndpointArn'])
    super(Device, self).save(*args, **kwargs)
def awsPushSNS(self, aws_acckey, aws_seckey, sns_arn, sns_regname, sbj=None):
    """ Amazon SNS (Simple Notification Service) reporting """
    try:
        if not (sns_arn and aws_acckey):
            # Nothing to do without a topic ARN and an access key.
            self.__logger.error("Message not sent with AWS Simple Notification Service...")
            return
        connection = connect_to_region(sns_regname,
                                       aws_access_key_id=aws_acckey,
                                       aws_secret_access_key=aws_seckey,
                                       validate_certs=False)
        if sbj:
            subject = "{0} - Finished AWS S3 backup!".format(sbj)
        else:
            subject = "Finished AWS S3 backup!"
        # Render the buffered log records into the message body.
        message = "\n".join("[{0} {1}] - {2}".format(i.asctime, i.levelname, i.message)
                            for i in self.__mh.buffer)
        connection.publish(sns_arn, message, subject=subject)
        self.__logger.info("Message successfully sent with AWS SNS...")
        return
    except Exception as err:
        self.__logger.error("AWS SNS error: {0}".format(err))
def alert(self, matches):
    """Publish the rendered alert body to the configured SNS topic."""
    body = self.create_alert_body(matches)

    # Credential precedence: explicit keys, then a named boto profile,
    # then the instance role.
    if self.aws_access_key or self.aws_secret_key:
        sns_client = sns.connect_to_region(
            self.aws_region,
            aws_access_key_id=self.aws_access_key,
            aws_secret_access_key=self.aws_secret_key)
    elif self.boto_profile:
        sns_client = sns.connect_to_region(self.aws_region,
                                           profile_name=self.boto_profile)
    else:
        sns_client = sns.connect_to_region(self.aws_region)

    sns_client.publish(self.sns_topic_arn, body, subject=self.create_title(matches))
    elastalert_logger.info("Sent sns notification to %s" % (self.sns_topic_arn))
def alert(self, matches):
    """Render each match and publish the aggregate to the SNS topic."""
    parts = []
    for match in matches:
        parts.append(unicode(BasicMatchString(self.rule, match)))
        # Separate text of aggregated alerts with dashes
        if len(matches) > 1:
            parts.append('\n----------------------------------------\n')
    body = ''.join(parts)

    # use instance role if aws_access_key and aws_secret_key are not specified
    if self.aws_access_key or self.aws_secret_key:
        sns_client = sns.connect_to_region(
            self.aws_region,
            aws_access_key_id=self.aws_access_key,
            aws_secret_access_key=self.aws_secret_key)
    else:
        sns_client = sns.connect_to_region(self.aws_region)

    sns_client.publish(self.sns_topic_arn, body, subject=self.create_default_title())
    elastalert_logger.info("Sent sns notification to %s" % (self.sns_topic_arn))
def get_sns_connection(region='eu-west-1'):
    """
    Create a new AWS SNS connection from the credentials in the django
    configuration.

    :param region: AWS region to connect to, defaults to Ireland
    :return: a new SNS connection
    """
    credentials = {
        'aws_access_key_id': settings.AWS_ACCESS_KEY,
        'aws_secret_access_key': settings.AWS_SECRET_ACCESS_KEY,
    }
    return sns.connect_to_region(region, **credentials)
def get_sns_connection(region='eu-west-1'):
    """
    Build an SNS connection using the credentials held in the django
    settings module.

    :param region: AWS region to connect to, defaults to Ireland
    :return: a new SNS connection
    """
    connection = sns.connect_to_region(
        region,
        aws_access_key_id=settings.AWS_ACCESS_KEY,
        aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY)
    return connection
def _connect(access_key, secret_key, region):
    """Open a boto SNS connection for the given region and credentials.

    :param access_key: AWS access key id
    :param secret_key: AWS secret access key
    :param region: AWS region name
    :raises Exception: when authentication/connection fails
    """
    from boto.sns import connect_to_region
    try:
        connection = connect_to_region(
            region_name=region,
            aws_access_key_id=access_key,
            aws_secret_access_key=secret_key)
    # Fix: the original bare `except:` also swallowed SystemExit /
    # KeyboardInterrupt and discarded the underlying error entirely.
    # Catch Exception only and carry the cause in the message.
    except Exception as err:
        raise Exception('AWS auth failed: {0}'.format(err))
    return connection
def connect_sns(self): ''' Connect to SNS ''' try: for reg in sns.regions(): if (reg.name == os.environ['AWS_DEFAULT_REGION']): return sns.connect_to_region(reg.name) return None except Exception as e: print "%s" % e exit(-1)
def connect_sns(self): ''' Connect to SNS ''' try: for reg in sns.regions(): if(reg.name == os.environ['AWS_DEFAULT_REGION']): return sns.connect_to_region(reg.name) return None except Exception as e: print "%s" % e exit(-1)
def __get_connection_SNS():
    """ Ensure connection to SNS

    Credential resolution order: explicit keys from the configuration
    file, then the EC2 instance metadata service (with the region derived
    from the instance's availability zone), then boto's own env var /
    configuration handling.

    :returns: a boto SNS connection
    :raises: re-raises whatever error occurred while connecting
    """
    try:
        if (get_global_option('aws_access_key_id') and
                get_global_option('aws_secret_access_key')):
            logger.debug(
                'Authenticating to SNS using '
                'credentials in configuration file')
            region = get_global_option('region')
            connection = sns.connect_to_region(
                region,
                aws_access_key_id=get_global_option(
                    'aws_access_key_id'),
                aws_secret_access_key=get_global_option(
                    'aws_secret_access_key'))
        else:
            try:
                logger.debug(
                    'Authenticating to SNS using EC2 instance profile')
                metadata = get_instance_metadata(timeout=1, num_retries=1)
                # Availability zone minus its trailing letter == region name
                region = metadata['placement']['availability-zone'][:-1]
                connection = sns.connect_to_region(region)
            except KeyError:
                # Metadata unavailable (not on EC2): fall back to boto.
                logger.debug(
                    'Authenticating to SNS using '
                    'env vars / boto configuration')
                region = get_global_option('region')
                connection = sns.connect_to_region(region)
    except Exception as err:
        logger.error('Failed connecting to SNS: {0}'.format(err))
        logger.error(
            'Please report an issue at: '
            'https://github.com/sebdah/dynamic-dynamodb/issues')
        raise
    logger.debug('Connected to SNS in {0}'.format(region))
    return connection
def sns_connection(config):
    """
    Initialize a Boto SNS client from Flask application configuration.

    Arguments:
        config (flask.config.Config)

    Return:
        sns_con (boto.sqs.connection.SNSConnection)
    """
    region = config['AWS_REGION']
    key_id = config['AWS_ACCESS_KEY_ID']
    secret = config['AWS_SECRET_ACCESS_KEY']
    return sns.connect_to_region(region,
                                 aws_access_key_id=key_id,
                                 aws_secret_access_key=secret)
def __get_connection_SNS():
    """ Ensure connection to SNS

    Tries, in order: explicit keys from the configuration file, the EC2
    instance profile (region derived from the availability zone in the
    instance metadata), and finally boto's env var / configuration
    resolution.

    :returns: a boto SNS connection
    :raises: re-raises any error encountered while connecting
    """
    try:
        if (get_global_option('aws_access_key_id') and get_global_option('aws_secret_access_key')):
            logger.debug(
                'Authenticating to SNS using '
                'credentials in configuration file')
            connection = sns.connect_to_region(
                get_global_option('region'),
                aws_access_key_id=get_global_option(
                    'aws_access_key_id'),
                aws_secret_access_key=get_global_option(
                    'aws_secret_access_key'))
        else:
            try:
                logger.debug(
                    'Authenticating to SNS using EC2 instance profile')
                metadata = get_instance_metadata(timeout=1, num_retries=1)
                # Availability zone minus its trailing letter == region name
                connection = sns.connect_to_region(
                    metadata['placement']['availability-zone'][:-1],
                    profile_name=metadata['iam']['info'][u'InstanceProfileArn'])
            except KeyError:
                # Metadata unavailable (not on EC2): fall back to boto.
                logger.debug(
                    'Authenticating to SNS using '
                    'env vars / boto configuration')
                connection = sns.connect_to_region(get_global_option('region'))
    except Exception as err:
        logger.error('Failed connecting to SNS: {0}'.format(err))
        logger.error(
            'Please report an issue at: '
            'https://github.com/sebdah/dynamic-dynamodb/issues')
        raise
    logger.debug('Connected to SNS in {0}'.format(get_global_option('region')))
    return connection
def __init__(self, environment, deployment, region, zone, aws_access_key_id, aws_secret_access_key):
    """
    Connect to the AWS services for a lab stack and prepare its template.

    :param environment: environment name (used in stack and topic names)
    :param deployment: deployment identifier
    :param region: AWS region for the CFN/SNS/VPC connections
    :param zone: availability zone suffix used in the stack name
    :param aws_access_key_id: AWS access key id
    :param aws_secret_access_key: AWS secret access key
    """
    # Create connections to AWS components
    self.cfn_connection = cfn.connect_to_region(
        region,
        aws_access_key_id=aws_access_key_id,
        aws_secret_access_key=aws_secret_access_key)
    self.sns_connection = sns.connect_to_region(
        region,
        aws_access_key_id=aws_access_key_id,
        aws_secret_access_key=aws_secret_access_key)
    self.vpc_connection = vpc.connect_to_region(
        region,
        aws_access_key_id=aws_access_key_id,
        aws_secret_access_key=aws_secret_access_key)
    # Temporary python class -> directory name hack
    lab_dir = self.__class__.__name__.lower()
    self.stack_name = "-".join([lab_dir, environment, deployment, region, zone])
    self.notification_arns = self.get_sns_topic("cloudformation-notifications-" + environment)
    self.parameters = []
    # Prepare the CFN template.
    # NOTE(review): `vpc_provider`, `template` and `max_template_size` are
    # not defined in this method — presumably module-level globals; confirm.
    self.template_url = "/".join([os.path.dirname(os.path.realpath(__file__)), lab_dir, vpc_provider, template])
    self.template_body = self.read_file(self.template_url, max_template_size)
    self.validate_template()
def handle_sns_subscription(cls, resource_description, stack):
    """Subscribe the stack's single SQS queue to the SNS topic named in the
    resource description; raises CfnSphereException if the queue prefix does
    not match exactly one queue."""
    logger = get_logger()
    properties = resource_description['Properties']
    queue_prefix = stack.name + '-' + properties['QueueResourceName']
    # The topic may live in a different region than the stack.
    topic_region = properties.get('TopicRegion', stack.region)
    topic_arn = cls.extract_topic_arn(resource_description, stack.parameters)

    sqs_conn = sqs.connect_to_region(stack.region)
    sns_conn = sns.connect_to_region(topic_region)

    matching_queues = sqs_conn.get_all_queues(prefix=queue_prefix)
    if len(matching_queues) != 1:
        raise CfnSphereException(
            "Found {0} queues matching the prefix: {1}. Should be 1.".format(
                len(matching_queues), queue_prefix))

    queue = matching_queues[0]
    logger.info("Subscribing queue {0} to topic {1} in {2}".format(queue.name, topic_arn, topic_region))
    sns_conn.subscribe_sqs_queue(topic_arn, queue)
def send_pns(arns, message, region, message_structure, dry_run): print "Send pns called with message: %s, dry_run: %s, %d arns, region: %s, message_structure: %s" % (message, str(dry_run), len(arns), region, message_structure) successes = [] failures = [] try: c = sns.connect_to_region(region) for arn in arns: try: if not dry_run: c.publish(None, message, None, arn, "json") else: # sleep to simiulate request time sleep(0.01) # 10 milliseconds successes.append(arn) except Exception as err: print "Error sending push notification: %s" % str(err) failures.append(arn) except Exception as e: print "Error connecting to SNS" return (successes, failures)
def send_pns(arns, message, region, message_structure, dry_run): print "Send pns called with message: %s, dry_run: %s, %d arns, region: %s, message_structure: %s" % ( message, str(dry_run), len(arns), region, message_structure) successes = [] failures = [] try: c = sns.connect_to_region(region) for arn in arns: try: if not dry_run: c.publish(None, message, None, arn, "json") else: # sleep to simiulate request time sleep(0.01) # 10 milliseconds successes.append(arn) except Exception as err: print "Error sending push notification: %s" % str(err) failures.append(arn) except Exception as e: print "Error connecting to SNS" return (successes, failures)
def send_contact_email(self, contact):
    """Publish a website contact-form submission to the SNS contact topic.

    :param contact: object exposing name/company/phone/email/message attributes
    """
    # SECURITY(review): AWS credentials are hard-coded in source. Rotate
    # them and move them into configuration / environment variables.
    sns_conn = sns.connect_to_region(
        'us-west-2',
        aws_access_key_id='AKIAJXN4QBOQO6TR7T5Q',
        aws_secret_access_key='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')
    # Plain-text body assembled from the contact form fields.
    msg = """
    Name: {name}
    Company: {company}
    Phone: {phone}
    Email: {email}
    Message: {message}
    """.format(name=contact.name,
               company=contact.company,
               phone=contact.phone,
               email=contact.email,
               message=contact.message)
    sns_conn.publish(
        u'arn:aws:sns:us-west-2:451326512542:ArchimedesCustomerContactTopic',
        msg,
        '[New Message] Archimedes Controls website received new message',
    )
def awsPushSNS(self, aws_acckey, aws_seckey, sns_arn, sns_regname, sbj=None):
    """ Amazon SNS (Simple Notification Service) reporting """
    try:
        if sns_arn and aws_acckey:
            connection = connect_to_region(
                sns_regname,
                aws_access_key_id=aws_acckey,
                aws_secret_access_key=aws_seckey,
                validate_certs=False)
            subject = ("{0} - Finished AWS S3 backup!".format(sbj)
                       if sbj else "Finished AWS S3 backup!")
            # Collect the buffered log records into the message body.
            log_lines = []
            for record in self.__mh.buffer:
                log_lines.append("[{0} {1}] - {2}".format(
                    record.asctime, record.levelname, record.message))
            connection.publish(sns_arn, "\n".join(log_lines), subject=subject)
            self.__logger.info("Message successfully sent with AWS SNS...")
        else:
            self.__logger.error(
                "Message not sent with AWS Simple Notification Service...")
        return
    except Exception as err:
        self.__logger.error("AWS SNS error: {0}".format(err))
def main():
    """Entry point: drive tile generation on Amazon EC2.

    Parses the command line, optionally syncs geodata/code and deploys the
    database to the remote host, then either benchmarks generation
    (``--time``), fills the SQS queue, and/or launches the tile-generation
    processes — finally publishing a completion notice to SNS when
    configured.
    """
    parser = ArgumentParser(
        description='Used to generate the tiles from Amazon EC2, '
        'and get the SQS queue status',
        prog=sys.argv[0])
    add_comon_options(parser)
    parser.add_argument(
        '--deploy-config', default=None, dest="deploy_config",
        metavar="FILE", help='path to the deploy configuration file')
    parser.add_argument(
        '--status', default=False, action="store_true",
        help='display the SQS queue status and exit')
    parser.add_argument(
        '--disable-geodata', default=True, action="store_false",
        dest="geodata", help='disable geodata synchronisation')
    parser.add_argument(
        '--disable-code', default=True, action="store_false",
        dest="deploy_code", help='disable deploy application code')
    parser.add_argument(
        '--disable-database', default=True, action="store_false",
        dest="deploy_database", help='disable deploy database')
    parser.add_argument(
        '--disable-fillqueue', default=True, action="store_false",
        dest="fill_queue", help='disable queue filling')
    parser.add_argument(
        '--disable-tilesgen', default=True, action="store_false",
        dest="tiles_gen", help='disable tile generation')
    parser.add_argument(
        '--host', default=None,
        help='The host used to generate tiles')
    parser.add_argument(
        '--shutdown', default=False, action="store_true",
        help='Shut done the remote host after the task.')
    parser.add_argument(
        '--wait', default=False, action="store_true",
        help='Wait that all the tasks will finish.')
    parser.add_argument(
        '--local', default=False, action="store_true",
        help='Run the generation locally')

    options = parser.parse_args()
    gene = TileGeneration(options.config, options, layer_name=options.layer)

    if options.status:  # pragma: no cover
        status(options, gene)
        sys.exit(0)

    if 'ec2' not in gene.config:  # pragma: no cover
        print("EC2 not configured")
        sys.exit(1)

    if options.deploy_config is None:
        options.deploy_config = gene.config['ec2']['deploy_config']
    # Each enabled step may still be vetoed by the configuration file.
    if options.geodata:
        options.geodata = not gene.config['ec2']['disable_geodata']
    if options.deploy_code:
        options.deploy_code = not gene.config['ec2']['disable_code']
    if options.deploy_database:
        options.deploy_database = not gene.config['ec2']['disable_database']
    if options.fill_queue:  # pragma: no cover
        options.fill_queue = not gene.config['ec2']['disable_fillqueue']
    if options.tiles_gen:  # pragma: no cover
        options.tiles_gen = not gene.config['ec2']['disable_tilesgen']

    # start aws
    if not options.host:
        # TODO not implemented yet
        host = aws_start(gene.config['ec2']['host_type'])  # pragma: no cover
    else:
        host = options.host

    if not options.local and options.geodata and \
            'geodata_folder' in gene.config['ec2']:  # pragma: no cover
        print("==== Sync geodata ====")
        ssh_options = ''
        if 'ssh_options' in gene.config['ec2']:  # pragma: no cover
            ssh_options = gene.config['ec2']['ssh_options']
        # sync geodata
        run_local([
            'rsync', '--delete', '-e', 'ssh ' + ssh_options, '-r',
            gene.config['ec2']['geodata_folder'],
            host + ':' + gene.config['ec2']['geodata_folder']
        ])

    if options.deploy_code and not options.local:
        print("==== Sync and build code ====")
        cmd = [
            'rsync',
            '--delete',
        ]
        if 'ssh_options' in gene.config['ec2']:  # pragma: no cover
            cmd += ['-e', 'ssh ' + gene.config['ec2']['ssh_options']]
            ssh_options = gene.config['ec2']['ssh_options']

        project_dir = gene.config['ec2']['code_folder']
        cmd += ['-r', '.', host + ':' + project_dir]
        run_local(cmd)

        for cmd in gene.config['ec2']['build_cmds']:
            run(options, cmd % environ, host, project_dir, gene)
        if 'apache_content' in gene.config['ec2'] and \
                'apache_config' in gene.config['ec2']:
            run(
                options,
                'echo %s > %s' % (
                    gene.config['ec2']['apache_content'],
                    gene.config['ec2']['apache_config']),
                host, project_dir, gene)
        run(options, 'sudo apache2ctl graceful', host, project_dir, gene)

    # deploy
    if options.deploy_database and not options.local:
        _deploy(gene, host)

    if options.deploy_code or options.deploy_database \
            or options.geodata and not options.local:
        # TODO not implemented yet
        create_snapshot(host, gene)

    if options.time:
        # Benchmark mode: run N generation processes, parse their
        # per-tile "time:"/"size:" reports and print mean cost figures.
        arguments = _get_arguments(options)
        arguments.extend(['--role', 'local'])
        arguments.extend(['--time', str(options.time)])

        project_dir = None if options.local else gene.config['ec2']['code_folder']
        processes = []
        for i in range(gene.config['ec2']['number_process']):
            processes.append(
                run_remote_process(
                    "%sgenerate_tiles %s" % (
                        _get_path(),
                        ' '.join([str(a) for a in arguments])),
                    host, project_dir, gene))

        tiles_size = []
        times = []
        for p in processes:
            results = p.communicate()
            if results[1] != '':  # pragma: no cover
                logger.debug('ERROR: %s' % results[1])
            if PY3:
                results = [r.decode('utf-8') for r in results]
            # Strip carriage-return progress lines from the output.
            results = (re.sub(u'\n[^\n]*\r', u'\n', results[0]), )
            results = (re.sub(u'^[^\n]*\r', u'', results[0]), )
            for r in results[0].split('\n'):
                if r.startswith('time: '):
                    times.append(int(r.replace('time: ', '')))
                elif r.startswith('size: '):
                    tiles_size.append(int(r.replace('size: ', '')))

        if len(times) == 0:  # pragma: no cover
            logger.error("Not enough data")
            sys.exit(1)
        mean_time = reduce(
            lambda x, y: x + y,
            [timedelta(microseconds=int(r)) for r in times],
            timedelta()) / len(times)**2
        mean_time_ms = mean_time.seconds * 1000 + mean_time.microseconds / 1000.0
        mean_size = reduce(
            lambda x, y: x + y, [int(r) for r in tiles_size], 0) / len(tiles_size)
        mean_size_kb = mean_size / 1024.0

        print('==== Time results ====')
        print('A tile is generated in: %0.3f [ms]' % mean_time_ms)
        print('Then mean generated tile size: %0.3f [kb]' % (mean_size_kb))
        print('''config:
    cost:
        tileonly_generation_time: %0.3f
        tile_generation_time: %0.3f
        metatile_generation_time: 0
        tile_size: %0.3f''' % (mean_time_ms, mean_time_ms, mean_size_kb))

        if options.shutdown:  # pragma: no cover
            run(options, 'sudo shutdown 0', host, project_dir, gene)
        sys.exit(0)

    if options.fill_queue and not options.local:  # pragma: no cover
        print("==== Till queue ====")
        # TODO test
        arguments = _get_arguments(options)
        arguments.extend(['--role', 'master', '--quiet'])
        project_dir = gene.config['ec2']['code_folder']
        # NOTE(review): unlike the other run_remote_process calls in this
        # function, this one passes `options` as the first argument —
        # confirm against the function's signature.
        run_remote_process(
            options,
            "%sgenerate_tiles %s" % (
                _get_path(),
                ' '.join([str(a) for a in arguments])),
            host, project_dir, gene)

        sleep(5)
        attributes = gene.get_sqs_queue().get_attributes()
        print("\rTiles to generate: %s/%s" % (
            attributes['ApproximateNumberOfMessages'],
            attributes['ApproximateNumberOfMessagesNotVisible'],
        ))

    if options.tiles_gen:  # pragma: no cover
        print("==== Generate tiles ====")

        if options.wait and not options.local:
            print("")

            # Background thread that polls the SQS queue and reports
            # progress while the remote generation runs.
            class Status(Thread):
                def run(self):  # pragma: no cover
                    while True:
                        attributes = gene.get_sqs_queue().get_attributes()
                        print("\rTiles to generate/generating: %s/%s" % (
                            attributes['ApproximateNumberOfMessages'],
                            attributes['ApproximateNumberOfMessagesNotVisible'],
                        ))
                        sleep(1)
            status_thread = Status()
            status_thread.setDaemon(True)
            status_thread.start()

        arguments = _get_arguments(options)
        arguments.extend(['--quiet'])
        if not options.local:
            arguments.extend(['--role', 'slave'])

        project_dir = None if options.local else gene.config['ec2']['code_folder']
        threads = []
        for i in range(gene.config['ec2']['number_process']):
            if options.local:
                threads.append(
                    run_local_process(
                        "%sgenerate_tiles --local-process-number %i %s" % (
                            _get_path(), i,
                            ' '.join([str(a) for a in arguments]))))
            else:
                run_remote_process(
                    "%sgenerate_tiles %s" % (
                        _get_path(),
                        ' '.join([str(a) for a in arguments])),
                    host, project_dir, gene)

        print('Tile generation started')

        if options.shutdown:
            # NOTE(review): this call passes only 2 arguments while the
            # other `run` calls pass 5 — confirm the intended signature.
            run(options, 'sudo shutdown 0')

        if options.wait and options.local:
            while len(threads) > 0:
                threads = [t for t in threads if t.is_alive()]
                sleep(1)

        # Publish a completion notification when SNS is configured.
        if 'sns' in gene.config:
            if 'region' in gene.config['sns']:
                connection = sns.connect_to_region(
                    gene.config['sns']['region'])
            else:
                connection = boto.connect_sns()
            connection.publish(
                gene.config['sns']['topic'],
                """The tile generation is finish
Host: %(host)s
Command: %(cmd)s""" % {
                    'host': socket.getfqdn(),
                    'cmd': ' '.join([quote(arg) for arg in sys.argv])
                },
                "Tile generation controller")
def _gene(self, options, gene, layer, dimensions={}):
    """Run one tile-generation pipeline for a layer.

    Builds the tilestream according to ``options.role`` ('local', 'master',
    'slave' or 'hash'): sources metatiles (tile file / computed tilecoords /
    SQS), fetches imagery from WMS or Mapnik, filters empty tiles, splits
    metatiles, stores the result, then prints/publishes a summary (to SNS
    when configured).

    NOTE(review): ``dimensions={}`` is a mutable default argument; it is
    only read here, but confirm no caller relies on it being mutated.
    """
    count_metatiles = None
    count_metatiles_dropped = Count()
    count_tiles = None
    count_tiles_dropped = Count()
    count_tiles_stored = None

    if options.get_bbox:
        try:
            tilecoord = parse_tilecoord(options.get_bbox)
            print("Tile bounds: [%i,%i,%i,%i]" %
                  gene.layer['grid_ref']['obj'].extent(tilecoord))
            exit()
        except ValueError as e:  # pragma: no cover
            exit(
                "Tile '%s' is not in the format 'z/x/y' or z/x/y:+n/+n\n%r" %
                (options.get_bbox, e))

    if options.get_hash:
        options.role = 'hash'
        options.test = 1

    sqs_tilestore = None
    if options.role in ('master', 'slave'):
        # Create SQS queue
        sqs_tilestore = SQSTileStore(
            gene.get_sqs_queue())  # pragma: no cover

    cache_tilestore = None
    if options.role in ('local', 'slave'):
        cache_tilestore = gene.get_tilesstore(options.cache, dimensions)

    meta = gene.layer['meta']
    if options.tiles:
        gene.set_store(TilesFileStore(options.tiles))
    elif options.role in ('local', 'master'):
        # Generate a stream of metatiles
        gene.init_tilecoords()
        gene.add_geom_filter()
        if options.local_process_number is not None:  # pragma: no cover
            gene.add_local_process_filter()
    elif options.role == 'slave':
        # Get the metatiles from the SQS queue
        gene.set_store(sqs_tilestore)  # pragma: no cover
    elif options.role == 'hash':
        try:
            z, x, y = (int(v) for v in options.get_hash.split('/'))
            if meta:
                gene.set_tilecoords(
                    [TileCoord(z, x, y, gene.layer['meta_size'])])
            else:
                gene.set_tilecoords([TileCoord(z, x, y)])
        except ValueError as e:  # pragma: no cover
            exit("Tile '%s' is not in the format 'z/x/y'\n%r" %
                 (options.get_hash, e))

    # At this stage, the tilestream contains metatiles that intersect geometry
    gene.add_logger()

    count_metatiles = gene.counter()

    if options.role == 'master':  # pragma: no cover
        # Put the metatiles into the SQS queue
        gene.put(sqs_tilestore)
        count_tiles = gene.counter()
    elif options.role in ('local', 'slave', 'hash'):
        if gene.layer['type'] == 'wms':
            params = gene.layer['params'].copy()
            if 'STYLES' not in params:
                # One style per requested WMS layer.
                params['STYLES'] = ','.join(
                    gene.layer['wmts_style']
                    for l in gene.layer['layers'].split(','))
            if gene.layer['generate_salt']:
                params['SALT'] = str(random.randint(0, 999999))
            params.update(dimensions)

            # Get the metatile image from the WMS server
            gene.get(
                URLTileStore(
                    tilelayouts=(WMSTileLayout(
                        url=gene.layer['url'],
                        layers=gene.layer['layers'],
                        srs=gene.layer['grid_ref']['srs'],
                        format=gene.layer['mime_type'],
                        border=gene.layer['meta_buffer'] if meta else 0,
                        tilegrid=gene.get_grid()['obj'],
                        params=params,
                    ), ),
                    headers=gene.layer['headers'],
                ),
                "Get tile from WMS")
        elif gene.layer['type'] == 'mapnik':  # pragma: no cover
            from tilecloud.store.mapnik_ import MapnikTileStore
            from tilecloud_chain.mapnik_ import MapnikDropActionTileStore

            grid = gene.get_grid()
            if gene.layer['output_format'] == 'grid':
                count_tiles = gene.counter()
                gene.get(
                    MapnikDropActionTileStore(
                        tilegrid=grid['obj'],
                        mapfile=gene.layer['mapfile'],
                        image_buffer=gene.layer['meta_buffer']
                        if meta else 0,
                        data_buffer=gene.layer['data_buffer'],
                        output_format=gene.layer['output_format'],
                        resolution=gene.layer['resolution'],
                        layers_fields=gene.layer['layers_fields'],
                        drop_empty_utfgrid=gene.layer['drop_empty_utfgrid'],
                        store=cache_tilestore,
                        queue_store=sqs_tilestore,
                        count=count_tiles_dropped,
                        proj4_literal=grid['proj4_literal'],
                    ),
                    "Create Mapnik grid tile")
            else:
                gene.get(
                    MapnikTileStore(
                        tilegrid=grid['obj'],
                        mapfile=gene.layer['mapfile'],
                        image_buffer=gene.layer['meta_buffer']
                        if meta else 0,
                        data_buffer=gene.layer['data_buffer'],
                        output_format=gene.layer['output_format'],
                        proj4_literal=grid['proj4_literal'],
                    ),
                    "Create Mapnik tile")

        # Turn non-image WMS responses into tile errors.
        def wrong_content_type_to_error(tile):
            if tile is not None and tile.content_type is not None \
                    and tile.content_type.find("image/") != 0:
                if tile.content_type.find(
                        "application/vnd.ogc.se_xml") == 0:
                    tile.error = "WMS server error: %s" % (
                        self._re_rm_xml_tag.sub(
                            '',
                            tile.data.decode('utf-8') if PY3 else tile.data))
                else:  # pragma: no cover
                    tile.error = "%s is not an image format, error: %s" % (
                        tile.content_type, tile.data)
            return tile
        gene.imap(wrong_content_type_to_error)

        # Handle errors
        gene.add_error_filters()

        if meta:
            if options.role == 'hash':
                gene.imap(HashLogger('empty_metatile_detection'))
            elif not options.near:
                # Discard tiles with certain content
                if 'empty_metatile_detection' in gene.layer:
                    empty_tile = gene.layer['empty_metatile_detection']
                    gene.imap(
                        HashDropper(
                            empty_tile['size'],
                            empty_tile['hash'],
                            store=cache_tilestore,
                            queue_store=sqs_tilestore,
                            count=count_metatiles_dropped,
                        ))

            # Record how many child tiles each metatile still owes, so the
            # slave can delete the SQS message when all are done.
            def add_elapsed_togenerate(metatile):
                if metatile is not None:
                    metatile.elapsed_togenerate = metatile.tilecoord.n**2
                    return True
                return False  # pragma: no cover
            gene.ifilter(add_elapsed_togenerate)

            # Split the metatile image into individual tiles
            gene.add_metatile_splitter()
            gene.imap(Logger(logger, logging.INFO, '%(tilecoord)s'))

            # Handle errors
            gene.add_error_filters()

        if gene.layer['type'] != 'mapnik' or \
                gene.layer['output_format'] != 'grid':
            count_tiles = gene.counter()

        if 'pre_hash_post_process' in gene.layer:  # pragma: no cover
            gene.process(gene.layer['pre_hash_post_process'])

        if options.role == 'hash':
            gene.imap(HashLogger('empty_tile_detection'))
        elif not options.near:
            # Discard tiles with certain content
            if 'empty_tile_detection' in gene.layer:
                empty_tile = gene.layer['empty_tile_detection']
                gene.imap(
                    HashDropper(
                        empty_tile['size'],
                        empty_tile['hash'],
                        store=cache_tilestore,
                        queue_store=sqs_tilestore,
                        count=count_tiles_dropped,
                    ))

        gene.process()
    else:  # pragma: no cover
        count_tiles = gene.counter()

    if options.role in ('local', 'slave'):
        gene.add_error_filters()
        gene.ifilter(DropEmpty(gene))
        count_tiles_stored = gene.counter(size=True)

        if options.time:
            # Report per-tile sizes so the benchmark driver can parse them.
            def log_size(tile):
                sys.stdout.write('size: %i\n' % len(tile.data))
                return tile
            gene.imap(log_size)

        gene.put(cache_tilestore, "Store the tile")

    gene.add_error_filters()

    if options.generated_tiles_file:  # pragma: no cover
        generated_tiles_file = open(options.generated_tiles_file, 'a')

        def do(tile):
            generated_tiles_file.write('%s\n' % (tile.tilecoord, ))
            return tile
        gene.imap(do)

    if options.role == 'slave':  # pragma: no cover
        if meta:
            # Delete the SQS message once every child tile is generated.
            def decr_tile_in_metatile(tile):
                tile.metatile.elapsed_togenerate -= 1
                if tile.metatile.elapsed_togenerate == 0:
                    sqs_tilestore.delete_one(tile.metatile)
                return True
            gene.ifilter(decr_tile_in_metatile)
        else:
            gene.delete(sqs_tilestore)

    message = []
    if options.time is not None:
        # Benchmark mode: time the middle `options.time` tiles to exclude
        # warm-up, then stop after 3x that many.
        class LogTime:
            n = 0
            t1 = None

            def __call__(self, tile):
                self.n += 1
                if self.n == options.time:
                    self.t1 = datetime.now()
                elif self.n == 2 * options.time:
                    t2 = datetime.now()
                    d = (t2 - self.t1) / options.time
                    sys.stdout.write(
                        'time: %i\n' %
                        ((d.days * 24 * 3600 + d.seconds) * 1000000 +
                         d.microseconds))
                return tile
        gene.imap(LogTime())

        gene.consume(options.time * 3)
    else:
        gene.consume()
        message = [
            "The tile generation of layer '{}{}' is finish".format(
                gene.layer['name'],
                "" if len(dimensions) == 0 or gene.layer['type'] != 'wms'
                else " (%s)" % ", ".join(
                    ["=".join(d) for d in dimensions.items()])),
        ]
        if options.role == "master":  # pragma: no cover
            message.append("Nb of generated jobs: {}".format(count_tiles.nb))
        else:
            if meta:
                message += [
                    "Nb generated metatiles: {}".format(count_metatiles.nb),
                    "Nb metatiles dropped: {}".format(
                        count_metatiles_dropped.nb),
                ]
            message += [
                "Nb generated tiles: {}".format(count_tiles.nb),
                "Nb tiles dropped: {}".format(count_tiles_dropped.nb),
            ]
            if options.role in ('local', 'slave'):
                message += [
                    "Nb tiles stored: {}".format(count_tiles_stored.nb),
                    "Nb tiles in error: {}".format(gene.error),
                    "Total time: {}".format(duration_format(gene.duration)),
                ]
                if count_tiles_stored.nb != 0:
                    message.append("Total size: {}".format(
                        size_format(count_tiles_stored.size)))
                if count_tiles.nb != 0:
                    message.append("Time per tile: {:0.0f} ms".format(
                        (gene.duration / count_tiles.nb * 1000).seconds))
                if count_tiles_stored.nb != 0:
                    message.append("Size per tile: {:0.0f} o".format(
                        count_tiles_stored.size / count_tiles_stored.nb))

    if not options.quiet and options.role in ('local', 'slave'):
        print("\n".join(message) + "\n")

    if cache_tilestore is not None and hasattr(cache_tilestore, 'connection'):
        cache_tilestore.connection.close()

    # Publish the summary to SNS when configured (not for hash/benchmark runs).
    if options.role != 'hash' and \
            options.time is None and \
            'sns' in gene.config:  # pragma: no cover
        if 'region' in gene.config['sns']:
            connection = sns.connect_to_region(
                gene.config['sns']['region'])
        else:
            connection = boto.connect_sns()
        sns_message = [message[0]]
        sns_message += [
            "Layer: {}".format(gene.layer['name']),
            "Role: {}".format(options.role),
            "Host: {}".format(socket.getfqdn()),
            "Command: {}".format(' '.join([quote(arg) for arg in sys.argv])),
        ]
        sns_message += message[1:]
        connection.publish(
            gene.config['sns']['topic'],
            "\n".join(sns_message),
            "Tile generation (%(layer)s - %(role)s)" % {
                'role': options.role,
                'layer': gene.layer['name']
            })
def setUp(self):
    """Open a fresh AWS connection in us-west-2 before each test case."""
    target_region = 'us-west-2'
    self.connection = connect_to_region(target_region)
def run(self, conn, tmp, module_name, module_args, inject, complex_args=None, **kwargs):
    """Ensure an AWS Config recorder and delivery channel exist for an account.

    Creates (if missing) the config S3 bucket with access logging, the SNS
    topic, and then puts/starts a configuration recorder and delivery channel.
    Returns an Ansible ``ReturnData`` describing the outcome.

    :param conn: connection object passed through to ``ReturnData``
    :param module_args: ``key=value`` string; must provide ``role_name``,
        ``account_number``, ``region`` and ``log_bucket``
    :param complex_args: optional dict merged over the parsed module args
    """
    try:
        # Merge structured args with the key=value module argument string.
        args = {}
        if complex_args:
            args.update(complex_args)
        args.update(parse_kv(module_args))
        role_name = args["role_name"]
        account_number = args["account_number"]
        region = args["region"]
        logging_bucket = args["log_bucket"]

        # Resolve AWS credentials from the runner environment.  Default to an
        # empty dict so the .get() calls below cannot hit an unbound name when
        # no environment is configured (the original left `env` undefined).
        env = {}
        if self.runner.environment:
            env = template.template(
                self.runner.basedir, self.runner.environment, inject,
                convert_bare=True)
            env = utils.safe_eval(env)
        credentials = dict(
            aws_access_key_id=env.get("AWS_ACCESS_KEY_ID"),
            aws_secret_access_key=env.get("AWS_SECRET_ACCESS_KEY"),
            security_token=env.get("AWS_SECURITY_TOKEN"),
        )

        bucketName = "config-bucket-%s-%s" % (account_number, region)
        snsName = "config-topic-%s-%s" % (account_number, region)

        s3_conn = s3.connect_to_region(region, **credentials)
        try:
            s3_conn.get_bucket(bucketName)
        except Exception:
            # Bucket does not exist yet: create it (us-east-1 rejects an
            # explicit location constraint) and enable access logging into
            # the shared logging bucket.
            if region == "us-east-1":
                bucket1 = s3_conn.create_bucket(bucketName)
            else:
                bucket1 = s3_conn.create_bucket(bucketName, location=region)
            bucket2 = s3_conn.get_bucket(logging_bucket)
            bucket1.enable_logging(bucket2, "ConfigBucket/")

        sns_conn = sns.connect_to_region(region, **credentials)
        sns_conn.create_topic(snsName)  # idempotent if the topic exists
        snsARN = "arn:aws:sns:%s:%s:%s" % (region, account_number, snsName)

        connection = configservice.connect_to_region(region, **credentials)

        # Reuse an existing recorder/channel name when present, otherwise
        # derive a deterministic one from the account number.
        response = connection.describe_configuration_recorders()
        if len(response["ConfigurationRecorders"]) == 0:
            recorder_name = "config-recorder-%s" % account_number
        else:
            for item in response["ConfigurationRecorders"]:
                recorder_name = item["name"]

        response = connection.describe_delivery_channels()
        if len(response["DeliveryChannels"]) == 0:
            channel_name = "config-channel-%s" % account_number
        else:
            for item in response["DeliveryChannels"]:
                channel_name = item["name"]

        ConfigurationRecorder = {
            'name': recorder_name,
            'roleARN': "arn:aws:iam::%s:role/%s" % (account_number, role_name),
        }
        ConfigurationChannel = {
            'name': channel_name,
            's3BucketName': bucketName,
            'snsTopicARN': snsARN,
        }
        connection.put_configuration_recorder(ConfigurationRecorder)
        connection.put_delivery_channel(ConfigurationChannel)
        connection.start_configuration_recorder(recorder_name)

        return ReturnData(
            conn=conn, comm_ok=True,
            result=dict(failed=False, changed=False, msg="Config Service Created"))
    except Exception as e:
        # Surface AWS/templating failures as a failed task result instead of
        # crashing the runner (the original try block had no handler at all).
        return ReturnData(
            conn=conn, comm_ok=False,
            result=dict(failed=True, msg=str(e)))
def _gene(self, options, gene, layer, dimensions=None):
    """Build and run the tile-generation pipeline for one layer.

    Wires stores, filters and counters onto ``gene`` according to
    ``options.role`` ('local', 'master', 'slave' or 'hash'), consumes the
    tilestream, prints a summary and optionally publishes it to SNS.

    Fix: ``dimensions`` previously defaulted to a shared mutable ``{}``;
    it now uses the ``None`` sentinel (same pattern as the newer
    ``_gene`` variant elsewhere in this codebase).

    :param options: parsed command-line options
    :param gene: TileGeneration pipeline builder
    :param layer: layer name (kept for interface compatibility)
    :param dimensions: optional WMS dimension name/value mapping
    """
    if dimensions is None:
        dimensions = {}
    count_metatiles = None
    count_metatiles_dropped = Count()
    count_tiles = None
    count_tiles_dropped = Count()
    count_tiles_stored = None

    if options.get_bbox:
        try:
            tilecoord = parse_tilecoord(options.get_bbox)
            print(
                "Tile bounds: [%i,%i,%i,%i]" %
                gene.layer['grid_ref']['obj'].extent(tilecoord)
            )
            exit()
        except ValueError as e:  # pragma: no cover
            exit(
                "Tile '%s' is not in the format 'z/x/y' or z/x/y:+n/+n\n%r" %
                (options.get_bbox, e)
            )

    if options.get_hash:
        options.role = 'hash'
        options.test = 1

    sqs_tilestore = None
    if options.role in ('master', 'slave'):
        # Create SQS queue
        sqs_tilestore = SQSTileStore(gene.get_sqs_queue())  # pragma: no cover

    cache_tilestore = None
    if options.role in ('local', 'slave'):
        cache_tilestore = gene.get_tilesstore(options.cache, dimensions)

    meta = gene.layer['meta']
    if options.tiles:
        gene.set_store(TilesFileStore(options.tiles))
    elif options.role in ('local', 'master'):
        # Generate a stream of metatiles
        gene.init_tilecoords()
        gene.add_geom_filter()
        if options.local_process_number is not None:  # pragma: no cover
            gene.add_local_process_filter()
    elif options.role == 'slave':
        # Get the metatiles from the SQS queue
        gene.set_store(sqs_tilestore)  # pragma: no cover
    elif options.role == 'hash':
        try:
            z, x, y = (int(v) for v in options.get_hash.split('/'))
            if meta:
                gene.set_tilecoords([TileCoord(z, x, y, gene.layer['meta_size'])])
            else:
                gene.set_tilecoords([TileCoord(z, x, y)])
        except ValueError as e:  # pragma: no cover
            exit(
                "Tile '%s' is not in the format 'z/x/y'\n%r" %
                (options.get_hash, e)
            )

    # At this stage, the tilestream contains metatiles that intersect geometry
    gene.add_logger()

    count_metatiles = gene.counter()

    if options.role == 'master':  # pragma: no cover
        # Put the metatiles into the SQS queue
        gene.put(sqs_tilestore)
        count_tiles = gene.counter()
    elif options.role in ('local', 'slave', 'hash'):
        if gene.layer['type'] == 'wms':
            params = gene.layer['params'].copy()
            if 'STYLES' not in params:
                params['STYLES'] = ','.join(
                    gene.layer['wmts_style']
                    for l in gene.layer['layers'].split(','))
            if gene.layer['generate_salt']:
                # Cache-busting parameter for WMS servers behind caches.
                params['SALT'] = str(random.randint(0, 999999))
            params.update(dimensions)

            # Get the metatile image from the WMS server
            gene.get(URLTileStore(
                tilelayouts=(WMSTileLayout(
                    url=gene.layer['url'],
                    layers=gene.layer['layers'],
                    srs=gene.layer['grid_ref']['srs'],
                    format=gene.layer['mime_type'],
                    border=gene.layer['meta_buffer'] if meta else 0,
                    tilegrid=gene.get_grid()['obj'],
                    params=params,
                ),),
                headers=gene.layer['headers'],
            ), "Get tile from WMS")
        elif gene.layer['type'] == 'mapnik':  # pragma: no cover
            from tilecloud.store.mapnik_ import MapnikTileStore
            from tilecloud_chain.mapnik_ import MapnikDropActionTileStore

            grid = gene.get_grid()
            if gene.layer['output_format'] == 'grid':
                count_tiles = gene.counter()
                gene.get(MapnikDropActionTileStore(
                    tilegrid=grid['obj'],
                    mapfile=gene.layer['mapfile'],
                    image_buffer=gene.layer['meta_buffer'] if meta else 0,
                    data_buffer=gene.layer['data_buffer'],
                    output_format=gene.layer['output_format'],
                    resolution=gene.layer['resolution'],
                    layers_fields=gene.layer['layers_fields'],
                    drop_empty_utfgrid=gene.layer['drop_empty_utfgrid'],
                    store=cache_tilestore,
                    queue_store=sqs_tilestore,
                    count=count_tiles_dropped,
                    proj4_literal=grid['proj4_literal'],
                ), "Create Mapnik grid tile")
            else:
                gene.get(MapnikTileStore(
                    tilegrid=grid['obj'],
                    mapfile=gene.layer['mapfile'],
                    image_buffer=gene.layer['meta_buffer'] if meta else 0,
                    data_buffer=gene.layer['data_buffer'],
                    output_format=gene.layer['output_format'],
                    proj4_literal=grid['proj4_literal'],
                ), "Create Mapnik tile")

        def wrong_content_type_to_error(tile):
            # Non-image payloads are server errors; turn them into tile errors.
            if tile is not None and tile.content_type is not None \
                    and tile.content_type.find("image/") != 0:
                if tile.content_type.find("application/vnd.ogc.se_xml") == 0:
                    tile.error = "WMS server error: %s" % (
                        self._re_rm_xml_tag.sub(
                            '', tile.data.decode('utf-8') if PY3 else tile.data
                        )
                    )
                else:  # pragma: no cover
                    tile.error = "%s is not an image format, error: %s" % (
                        tile.content_type, tile.data
                    )
            return tile
        gene.imap(wrong_content_type_to_error)

        # Handle errors
        gene.add_error_filters()

        if meta:
            if options.role == 'hash':
                gene.imap(HashLogger('empty_metatile_detection'))
            elif not options.near:
                # Discard tiles with certain content
                if 'empty_metatile_detection' in gene.layer:
                    empty_tile = gene.layer['empty_metatile_detection']
                    gene.imap(HashDropper(
                        empty_tile['size'], empty_tile['hash'],
                        store=cache_tilestore,
                        queue_store=sqs_tilestore,
                        count=count_metatiles_dropped,
                    ))

            def add_elapsed_togenerate(metatile):
                if metatile is not None:
                    metatile.elapsed_togenerate = metatile.tilecoord.n ** 2
                    return True
                return False  # pragma: no cover
            gene.ifilter(add_elapsed_togenerate)

            # Split the metatile image into individual tiles
            gene.add_metatile_splitter()
            gene.imap(Logger(logger, logging.INFO, '%(tilecoord)s'))

            # Handle errors
            gene.add_error_filters()

        if gene.layer['type'] != 'mapnik' or gene.layer['output_format'] != 'grid':
            count_tiles = gene.counter()

        if 'pre_hash_post_process' in gene.layer:  # pragma: no cover
            gene.process(gene.layer['pre_hash_post_process'])

        if options.role == 'hash':
            gene.imap(HashLogger('empty_tile_detection'))
        elif not options.near:
            # Discard tiles with certain content
            if 'empty_tile_detection' in gene.layer:
                empty_tile = gene.layer['empty_tile_detection']
                gene.imap(HashDropper(
                    empty_tile['size'], empty_tile['hash'],
                    store=cache_tilestore,
                    queue_store=sqs_tilestore,
                    count=count_tiles_dropped,
                ))

        gene.process()
    else:  # pragma: no cover
        count_tiles = gene.counter()

    if options.role in ('local', 'slave'):
        gene.add_error_filters()
        gene.ifilter(DropEmpty(gene))
        count_tiles_stored = gene.counter(size=True)

        if options.time:
            def log_size(tile):
                sys.stdout.write('size: %i\n' % len(tile.data))
                return tile
            gene.imap(log_size)

        gene.put(cache_tilestore, "Store the tile")

    gene.add_error_filters()

    if options.generated_tiles_file:  # pragma: no cover
        generated_tiles_file = open(options.generated_tiles_file, 'a')

        def do(tile):
            generated_tiles_file.write('%s\n' % (tile.tilecoord, ))
            return tile
        gene.imap(do)

    if options.role == 'slave':  # pragma: no cover
        if meta:
            def decr_tile_in_metatile(tile):
                # Remove the metatile message once all its tiles are done.
                tile.metatile.elapsed_togenerate -= 1
                if tile.metatile.elapsed_togenerate == 0:
                    sqs_tilestore.delete_one(tile.metatile)
                return True
            gene.ifilter(decr_tile_in_metatile)
        else:
            gene.delete(sqs_tilestore)

    message = []
    if options.time is not None:
        class LogTime:
            # Measure throughput between tile `time` and tile `2*time` so the
            # warm-up cost of the first tiles is excluded.
            n = 0
            t1 = None

            def __call__(self, tile):
                self.n += 1
                if self.n == options.time:
                    self.t1 = datetime.now()
                elif self.n == 2 * options.time:
                    t2 = datetime.now()
                    d = (t2 - self.t1) / options.time
                    sys.stdout.write(
                        'time: %i\n' %
                        ((d.days * 24 * 3600 + d.seconds) * 1000000 +
                         d.microseconds))
                return tile
        gene.imap(LogTime())

        gene.consume(options.time * 3)
    else:
        gene.consume()

        message = [
            "The tile generation of layer '{}{}' is finish".format(
                gene.layer['name'],
                "" if len(dimensions) == 0 or gene.layer['type'] != 'wms'
                else " (%s)" % ", ".join(
                    ["=".join(d) for d in dimensions.items()])
            ),
        ]
        if options.role == "master":  # pragma: no cover
            message.append("Nb of generated jobs: {}".format(count_tiles.nb))
        else:
            if meta:
                message += [
                    "Nb generated metatiles: {}".format(count_metatiles.nb),
                    "Nb metatiles dropped: {}".format(count_metatiles_dropped.nb),
                ]
            message += [
                "Nb generated tiles: {}".format(count_tiles.nb),
                "Nb tiles dropped: {}".format(count_tiles_dropped.nb),
            ]
            if options.role in ('local', 'slave'):
                message += [
                    "Nb tiles stored: {}".format(count_tiles_stored.nb),
                    "Nb tiles in error: {}".format(gene.error),
                    "Total time: {}".format(duration_format(gene.duration)),
                ]
                if count_tiles_stored.nb != 0:
                    message.append("Total size: {}".format(
                        size_format(count_tiles_stored.size)))
                if count_tiles.nb != 0:
                    message.append("Time per tile: {:0.0f} ms".format(
                        (gene.duration / count_tiles.nb * 1000).seconds))
                if count_tiles_stored.nb != 0:
                    message.append("Size per tile: {:0.0f} o".format(
                        count_tiles_stored.size / count_tiles_stored.nb))

        if not options.quiet and options.role in ('local', 'slave'):
            print("\n".join(message) + "\n")

    if cache_tilestore is not None and hasattr(cache_tilestore, 'connection'):
        cache_tilestore.connection.close()

    if options.role != 'hash' and options.time is None and 'sns' in gene.config:  # pragma: no cover
        if 'region' in gene.config['sns']:
            connection = sns.connect_to_region(gene.config['sns']['region'])
        else:
            connection = boto.connect_sns()
        sns_message = [message[0]]
        sns_message += [
            "Layer: {}".format(gene.layer['name']),
            "Role: {}".format(options.role),
            "Host: {}".format(socket.getfqdn()),
            "Command: {}".format(' '.join([quote(arg) for arg in sys.argv])),
        ]
        sns_message += message[1:]
        connection.publish(
            gene.config['sns']['topic'],
            "\n".join(sns_message),
            "Tile generation (%(layer)s - %(role)s)" % {
                'role': options.role,
                'layer': gene.layer['name']
            }
        )
def get_connection_sns():
    """Return an SNSConnection for the region configured in settings.

    The previous implementation opened one connection merely to read its
    ``region`` attribute, then built a second connection from it.  A single
    ``connect_to_region`` call returns an ``SNSConnection`` already bound to
    that region with the same credentials, so one call suffices.
    """
    return sns.connect_to_region(
        settings.AWS_REGION_NAME,
        aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
        aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY)
def connect(self):
    """Open an SNS connection for ``self.region`` and cache it on the instance.

    Returns the connection so callers can use it directly.
    """
    credentials = {
        'aws_access_key_id': self.aws_access_key_id,
        'aws_secret_access_key': self.aws_secret_access_key,
    }
    self.connection = sns.connect_to_region(
        region_name=self.region, **credentials)
    return self.connection
def main():
    """Dump every endpoint ARN (and its enabled state) of an SNS platform
    application to a CSV file, paging through the API with retries.

    Fixes over the original: the long options now carry the ``=`` suffix so
    they actually accept values; the option checks use the declared long
    names (``--app-arn``, ``--output-file``) instead of undeclared ones; the
    bare ``except:`` around getopt is narrowed; ``is None`` replaces
    ``== None``; and the redundant ``f.close()`` inside ``with`` is removed.
    """
    try:
        opts, args = getopt.getopt(
            sys.argv[1:], 'hva:g:o:',
            ['help', 'verbose', 'app-arn=', 'region=', 'output-file='])
    except getopt.GetoptError:
        usage()
        sys.exit(2)

    app_arn = ''
    output_file = ''
    region = ''
    verbose = False
    for o, a in opts:
        if o in ('-h', '--help'):
            usage()
            sys.exit()
        if o in ('-v', '--verbose'):
            verbose = True
        if o in ('-a', '--app-arn'):
            app_arn = a
        if o in ('-g', '--region'):
            region = a
        if o in ('-o', '--output-file'):
            output_file = a

    if app_arn == '':
        require_param("sns application arn")
    if output_file == '':
        require_param("output_file")
    if region == '':
        require_param("region (e.g., us-east-1, us-west-2)")

    c = sns.connect_to_region(region)

    total_arns = 0
    next_token = None
    retries = 5
    failures = 0
    with open(output_file, 'w') as f:
        # Keep paging until there is no NextToken; a success resets the
        # failure counter so only consecutive failures abort the dump.
        while failures < retries:
            try:
                endpoint_result = c.list_endpoints_by_platform_application(
                    app_arn, next_token)
                result = endpoint_result[
                    'ListEndpointsByPlatformApplicationResponse'][
                    'ListEndpointsByPlatformApplicationResult']
                next_token = result['NextToken']
                endpoints = result['Endpoints']
                total_arns += len(endpoints)
                print("Got %d more arns. Total arns %d" % (len(endpoints), total_arns))
                print("Next token: %s" % next_token)

                # write to file
                for endpoint in endpoints:
                    endpoint_arn = endpoint['EndpointArn']
                    endpoint_enabled = endpoint['Attributes']['Enabled']
                    f.write("%s,%s\n" % (endpoint_arn, endpoint_enabled))
                print("Arns written to %s" % output_file)

                # reset failures
                failures = 0

                # check if there are any more arns
                if next_token is None:
                    break
            except Exception as err:
                print("Error fetching user arns: %s" % str(err))
                failures += 1
def get_sns_conn():
    """Return the module-level SNS connection, creating it lazily on first use."""
    global sns_conn
    if sns_conn is not None:
        return sns_conn
    from boto import sns
    sns_conn = sns.connect_to_region("ap-southeast-1")
    return sns_conn
def __init__(self, region, **kwargs):
    """Create an SNS connection in *region*; extra kwargs are passed to boto."""
    connection = sns.connect_to_region(region, **kwargs)
    self.conn = connection
def main():
    """Drive tile generation from Amazon EC2.

    Parses the command line, optionally syncs geodata/code to the remote
    host, deploys the database, runs a timing benchmark, fills the SQS
    queue (master role) and launches the tile generation processes (slave
    or local role), finally publishing a notification to SNS when
    configured.
    """
    parser = ArgumentParser(
        description='Used to generate the tiles from Amazon EC2, '
        'and get the SQS queue status',
        prog=sys.argv[0]
    )
    add_comon_options(parser)
    parser.add_argument(
        '--deploy-config',
        default=None, dest="deploy_config", metavar="FILE",
        help='path to the deploy configuration file'
    )
    parser.add_argument(
        '--status', default=False, action="store_true",
        help='display the SQS queue status and exit'
    )
    parser.add_argument(
        '--disable-geodata', default=True, action="store_false", dest="geodata",
        help='disable geodata synchronisation'
    )
    parser.add_argument(
        '--disable-code', default=True, action="store_false", dest="deploy_code",
        help='disable deploy application code'
    )
    parser.add_argument(
        '--disable-database', default=True, action="store_false", dest="deploy_database",
        help='disable deploy database'
    )
    parser.add_argument(
        '--disable-fillqueue', default=True, action="store_false", dest="fill_queue",
        help='disable queue filling'
    )
    parser.add_argument(
        '--disable-tilesgen', default=True, action="store_false", dest="tiles_gen",
        help='disable tile generation'
    )
    parser.add_argument(
        '--host', default=None,
        help='The host used to generate tiles'
    )
    parser.add_argument(
        '--shutdown', default=False, action="store_true",
        help='Shut done the remote host after the task.'
    )
    parser.add_argument(
        '--wait', default=False, action="store_true",
        help='Wait that all the tasks will finish.'
    )
    parser.add_argument(
        '--local', default=False, action="store_true",
        help='Run the generation locally'
    )

    options = parser.parse_args()
    gene = TileGeneration(options.config, options, layer_name=options.layer)

    if options.status:  # pragma: no cover
        status(options, gene)
        sys.exit(0)

    if 'ec2' not in gene.config:  # pragma: no cover
        print("EC2 not configured")
        sys.exit(1)

    # Command-line flags only stay enabled if the configuration does not
    # disable the corresponding step.
    if options.deploy_config is None:
        options.deploy_config = gene.config['ec2']['deploy_config']
    if options.geodata:
        options.geodata = not gene.config['ec2']['disable_geodata']
    if options.deploy_code:
        options.deploy_code = not gene.config['ec2']['disable_code']
    if options.deploy_database:
        options.deploy_database = not gene.config['ec2']['disable_database']
    if options.fill_queue:  # pragma: no cover
        options.fill_queue = not gene.config['ec2']['disable_fillqueue']
    if options.tiles_gen:  # pragma: no cover
        options.tiles_gen = not gene.config['ec2']['disable_tilesgen']

    # start aws
    if not options.host:
        # TODO not implemented yet
        host = aws_start(gene.config['ec2']['host_type'])  # pragma: no cover
    else:
        host = options.host

    if not options.local and options.geodata and 'geodata_folder' in gene.config['ec2']:
        print("==== Sync geodata ====")
        ssh_options = ''
        if 'ssh_options' in gene.config['ec2']:  # pragma: no cover
            ssh_options = gene.config['ec2']['ssh_options']
        # sync geodata
        run_local([
            'rsync', '--delete', '-e', 'ssh ' + ssh_options,
            '-r', gene.config['ec2']['geodata_folder'],
            host + ':' + gene.config['ec2']['geodata_folder']
        ])

    if options.deploy_code and not options.local:
        print("==== Sync and build code ====")
        error = gene.validate(gene.config['ec2'], 'ec2', 'code_folder', required=True)
        if error:
            exit(1)  # pragma: no cover

        cmd = ['rsync', '--delete', ]
        if 'ssh_options' in gene.config['ec2']:  # pragma: no cover
            cmd += ['-e', 'ssh ' + gene.config['ec2']['ssh_options']]
            ssh_options = gene.config['ec2']['ssh_options']

        project_dir = gene.config['ec2']['code_folder']
        cmd += ['-r', '.', host + ':' + project_dir]
        run_local(cmd)

        # Build remotely, then (optionally) install the Apache config and
        # reload Apache.
        for cmd in gene.config['ec2']['build_cmds']:
            run(options, cmd % environ, host, project_dir, gene)
        if 'apache_content' in gene.config['ec2'] and 'apache_config' in gene.config['ec2']:
            run(
                options,
                'echo %s > %s' % (
                    gene.config['ec2']['apache_content'],
                    gene.config['ec2']['apache_config']
                ),
                host, project_dir, gene
            )
        run(options, 'sudo apache2ctl graceful', host, project_dir, gene)

    # deploy
    if options.deploy_database and not options.local:
        _deploy(gene, host)

    if options.deploy_code or options.deploy_database \
            or options.geodata and not options.local:
        # TODO not implemented yet
        create_snapshot(host, gene)

    if options.time:
        # Timing benchmark: run generate_tiles with --time and harvest the
        # "time:"/"size:" lines each process prints.
        arguments = _get_arguments(options)
        arguments.extend(['--role', 'local'])
        arguments.extend(['--time', str(options.time)])

        project_dir = None if options.local else gene.config['ec2']['code_folder']
        processes = []
        for i in range(gene.config['ec2']['number_process']):
            processes.append(
                run_remote_process(
                    "%sgenerate_tiles %s" % (
                        _get_path(), ' '.join([str(a) for a in arguments])
                    ), host, project_dir, gene
                )
            )

        tiles_size = []
        times = []
        for p in processes:
            results = p.communicate()
            if results[1] != '':  # pragma: no cover
                logger.debug('ERROR: %s' % results[1])
            if PY3:
                results = [r.decode('utf-8') for r in results]
            # Strip carriage-return progress lines from the captured output.
            results = (re.sub(u'\n[^\n]*\r', u'\n', results[0]), )
            results = (re.sub(u'^[^\n]*\r', u'', results[0]), )
            for r in results[0].split('\n'):
                if r.startswith('time: '):
                    times.append(int(r.replace('time: ', '')))
                elif r.startswith('size: '):
                    tiles_size.append(int(r.replace('size: ', '')))

        if len(times) == 0:  # pragma: no cover
            logger.error("Not enough data")
            sys.exit(1)
        # NOTE(review): dividing by len(times) ** 2 (not len(times)) looks
        # deliberate (per-process mean of per-tile means) — confirm intent.
        mean_time = reduce(
            lambda x, y: x + y,
            [timedelta(microseconds=int(r)) for r in times],
            timedelta()
        ) / len(times) ** 2
        mean_time_ms = mean_time.seconds * 1000 + mean_time.microseconds / 1000.0

        mean_size = reduce(lambda x, y: x + y, [int(r) for r in tiles_size], 0) / len(tiles_size)
        mean_size_kb = mean_size / 1024.0

        print('==== Time results ====')
        print('A tile is generated in: %0.3f [ms]' % mean_time_ms)
        print('Then mean generated tile size: %0.3f [kb]' % (mean_size_kb))
        print('''config:
    cost:
        tileonly_generation_time: %0.3f
        tile_generation_time: %0.3f
        metatile_generation_time: 0
        tile_size: %0.3f''' % (mean_time_ms, mean_time_ms, mean_size_kb))

        if options.shutdown:  # pragma: no cover
            run(options, 'sudo shutdown 0', host, project_dir, gene)
        sys.exit(0)

    if options.fill_queue and not options.local:  # pragma: no cover
        print("==== Till queue ====")
        # TODO test
        arguments = _get_arguments(options)
        arguments.extend(['--role', 'master', '--quiet'])

        project_dir = gene.config['ec2']['code_folder']
        run_remote_process(
            options,
            "%sgenerate_tiles %s" % (
                _get_path(), ' '.join([str(a) for a in arguments])
            ), host, project_dir, gene
        )

        sleep(5)
        attributes = gene.get_sqs_queue().get_attributes()
        print(
            "\rTiles to generate: %s/%s" % (
                attributes['ApproximateNumberOfMessages'],
                attributes['ApproximateNumberOfMessagesNotVisible'],
            )
        )

    if options.tiles_gen:  # pragma: no cover
        print("==== Generate tiles ====")

        if options.wait and not options.local:
            print("")

            # Background thread that prints the SQS queue depth every second.
            class Status(Thread):
                def run(self):  # pragma: no cover
                    while True:
                        attributes = gene.get_sqs_queue().get_attributes()
                        print(
                            "\rTiles to generate/generating: %s/%s" % (
                                attributes['ApproximateNumberOfMessages'],
                                attributes['ApproximateNumberOfMessagesNotVisible'],
                            )
                        )
                        sleep(1)

            status_thread = Status()
            status_thread.setDaemon(True)
            status_thread.start()

        arguments = _get_arguments(options)
        arguments.extend(['--quiet'])
        if not options.local:
            arguments.extend(['--role', 'slave'])

        project_dir = None if options.local else gene.config['ec2']['code_folder']
        threads = []
        for i in range(gene.config['ec2']['number_process']):
            if options.local:
                threads.append(run_local_process(
                    "%sgenerate_tiles --local-process-number %i %s" % (
                        _get_path(), i, ' '.join([str(a) for a in arguments])
                    )
                ))
            else:
                run_remote_process(
                    "%sgenerate_tiles %s" % (
                        _get_path(), ' '.join([str(a) for a in arguments])
                    ), host, project_dir, gene
                )

        print('Tile generation started')

        if options.shutdown:
            # NOTE(review): unlike the other run() calls this one omits
            # host/project_dir/gene — verify run()'s signature accepts this.
            run(options, 'sudo shutdown 0')

        if options.wait and options.local:
            # Poll until every local generation process has exited.
            while len(threads) > 0:
                threads = [t for t in threads if t.is_alive()]
                sleep(1)

        if 'sns' in gene.config:
            if 'region' in gene.config['sns']:
                connection = sns.connect_to_region(gene.config['sns']['region'])
            else:
                connection = boto.connect_sns()
            connection.publish(
                gene.config['sns']['topic'],
                """The tile generation is finish
Host: %(host)s
Command: %(cmd)s""" % {
                    'host': socket.getfqdn(),
                    'cmd': ' '.join([quote(arg) for arg in sys.argv])
                },
                "Tile generation controller"
            )
def _gene(self, options, gene, dimensions=None):
    """Build and run the multi-layer tile-generation pipeline.

    Configures stores, filters and counters on ``gene`` according to
    ``options.role`` ('local', 'master', 'slave' or 'hash'), consumes the
    tilestream, prints a summary and optionally publishes it to SNS.
    Counters and stores are kept on ``self`` so callers can inspect them.
    """
    if dimensions is None:  # pragma: no cover
        dimensions = {}
    self.dimensions = dimensions
    self.count_metatiles = None
    self.count_metatiles_dropped = Count()
    self.count_tiles = Count()
    self.count_tiles_dropped = Count()
    self.count_tiles_stored = None
    self.sqs_tilestore = None
    self.cache_tilestore = None

    if options.get_bbox:
        try:
            tilecoord = parse_tilecoord(options.get_bbox)
            print("Tile bounds: [{},{},{},{}]".format(*default_int(
                gene.layer['grid_ref']['obj'].extent(tilecoord))))
            exit()
        except ValueError as e:  # pragma: no cover
            print(
                "Tile '{}' is not in the format 'z/x/y' or z/x/y:+n/+n\n{}"
                .format(options.get_bbox, repr(e)))
            exit(1)

    if options.get_hash:
        options.role = 'hash'
        options.test = 1

    if options.role in ('master', 'slave'):
        # Create SQS queue
        self.sqs_tilestore = SQSTileStore(
            gene.get_sqs_queue(),
            on_empty=await_message if options.daemon else maybe_stop)  # pragma: no cover

    if options.role in ('local', 'slave'):
        self.cache_tilestore = gene.get_tilesstore(options.cache, dimensions)

    if options.tiles:
        gene.set_store(TilesFileStore(options.tiles, options.layer))
    elif options.role in ('local', 'master'):
        # Generate a stream of metatiles
        gene.init_tilecoords()
        gene.add_geom_filter()
        if options.role in ('local', 'master') and 'logging' in gene.config:
            gene.imap(
                DatabaseLoggerInit(
                    gene.config['logging'],
                    options is not None and options.daemon))
        if options.local_process_number is not None:  # pragma: no cover
            gene.add_local_process_filter()
    elif options.role == 'slave':
        # Get the metatiles from the SQS queue
        gene.set_store(self.sqs_tilestore)  # pragma: no cover
    elif options.role == 'hash':
        try:
            z, x, y = (int(v) for v in options.get_hash.split('/'))
            if gene.layer.get('meta'):
                gene.set_tilecoords(
                    [TileCoord(z, x, y, gene.layer['meta_size'])])
            else:
                gene.set_tilecoords([TileCoord(z, x, y)])
        except ValueError as e:  # pragma: no cover
            exit("Tile '{}' is not in the format 'z/x/y'\n{}".format(
                options.get_hash, repr(e)))

    # At this stage, the tilestream contains metatiles that intersect geometry
    gene.add_logger()

    self.count_metatiles = gene.counter()

    if options.role == 'master':  # pragma: no cover
        # Put the metatiles into the SQS queue
        gene.put(self.sqs_tilestore)
        self.count_tiles = gene.counter()
    elif options.role in ('local', 'slave', 'hash'):
        # One tilestore per configured layer, dispatched by layer name.
        gene.get(
            MultiTileStore({
                name: self._get_tilestore_for_layer(layer, gene)
                for name, layer in gene.layers.items()
            }), 'Get tile')

        def wrong_content_type_to_error(tile):
            # Non-image payloads are server errors; turn them into tile errors.
            if tile is not None and tile.content_type is not None \
                    and tile.content_type.find("image/") != 0:
                if tile.content_type.find(
                        "application/vnd.ogc.se_xml") == 0:
                    tile.error = "WMS server error: {}".format(
                        (self._re_rm_xml_tag.sub('', tile.error)))
                else:  # pragma: no cover
                    tile.error = "{} is not an image format, error: {}".format(
                        tile.content_type, tile.error)
            return tile
        gene.imap(wrong_content_type_to_error)
        gene.add_error_filters()

        if options.role == 'hash':
            if gene.layer.get('meta', False):
                gene.imap(HashLogger('empty_metatile_detection'))
        elif not options.near:
            # Drop metatiles whose content matches a configured "empty" hash.
            droppers = {}
            for lname, layer in gene.layers.items():
                if 'empty_metatile_detection' in layer:
                    empty_tile = layer['empty_metatile_detection']
                    droppers[lname] = HashDropper(
                        empty_tile['size'], empty_tile['hash'],
                        store=self.cache_tilestore,
                        queue_store=self.sqs_tilestore,
                        count=self.count_metatiles_dropped,
                    )
            if droppers:
                gene.imap(MultiAction(droppers))

        def add_elapsed_togenerate(metatile):
            if metatile is not None:
                metatile.elapsed_togenerate = metatile.tilecoord.n**2
                return True
            return False  # pragma: no cover
        gene.ifilter(add_elapsed_togenerate)

        # Split the metatile image into individual tiles
        gene.add_metatile_splitter()
        gene.imap(Logger(logger, logging.INFO, '%(tilecoord)s'))

        gene.imap(self.count_tiles)

        gene.process(key='pre_hash_post_process')

        if options.role == 'hash':
            gene.imap(HashLogger('empty_tile_detection'))
        elif not options.near:
            # Drop individual tiles whose content matches an "empty" hash.
            droppers = {}
            for lname, layer in gene.layers.items():
                if 'empty_tile_detection' in layer:
                    empty_tile = layer['empty_tile_detection']
                    droppers[lname] = HashDropper(
                        empty_tile['size'], empty_tile['hash'],
                        store=self.cache_tilestore,
                        queue_store=self.sqs_tilestore,
                        count=self.count_tiles_dropped,
                    )
            if droppers:
                gene.imap(MultiAction(droppers))

        gene.process()
    else:  # pragma: no cover
        self.count_tiles = gene.counter()

    if options.role in ('local', 'slave'):
        self.count_tiles_stored = gene.counter(size=True)

        if options.time:
            def log_size(tile):
                sys.stdout.write('size: {}\n'.format(len(tile.data)))
                return tile
            gene.imap(log_size)

        gene.put(self.cache_tilestore, "Store the tile")

    if options.generated_tiles_file:  # pragma: no cover
        generated_tiles_file = open(options.generated_tiles_file, 'a')

        def do(tile):
            generated_tiles_file.write('{}\n'.format(tile.tilecoord))
            return tile
        gene.imap(do)

    if options.role == 'slave':  # pragma: no cover
        def delete_from_store(tile):
            # Remove the SQS message once the (meta)tile is fully handled.
            if hasattr(tile, 'metatile'):
                tile.metatile.elapsed_togenerate -= 1
                if tile.metatile.elapsed_togenerate == 0:
                    self.sqs_tilestore.delete_one(tile.metatile)
            else:
                self.sqs_tilestore.delete_one(tile)
            return True
        gene.ifilter(delete_from_store)

    if options.role in ('local', 'slave') and 'logging' in gene.config:
        gene.imap(
            DatabaseLogger(
                gene.config['logging'],
                options is not None and options.daemon))
    gene.add_error_filters()

    message = []
    if options.time is not None:
        # Measure throughput between tile `time` and tile `2*time` so the
        # warm-up cost of the first tiles is excluded.
        class LogTime:
            n = 0
            t1 = None

            def __call__(self, tile):
                self.n += 1
                if self.n == options.time:
                    self.t1 = datetime.now()
                elif self.n == 2 * options.time:
                    t2 = datetime.now()
                    d = (t2 - self.t1) / options.time
                    sys.stdout.write('time: {}\n'.format(
                        ((d.days * 24 * 3600 + d.seconds) * 1000000 +
                         d.microseconds)))
                return tile
        gene.imap(LogTime())

        gene.consume(options.time * 3)
    else:
        gene.consume()

        if gene.layer is not None:
            message = [
                "The tile generation of layer '{}{}' is finish".format(
                    gene.layer['name'],
                    "" if len(dimensions) == 0 or gene.layer['type'] != 'wms'
                    else " ({})".format(", ".join(
                        ["=".join(d) for d in dimensions.items()]))),
            ]
            if options.role == "master":  # pragma: no cover
                message.append("Nb of generated jobs: {}".format(
                    self.count_tiles.nb))
            else:
                if gene.layer.get('meta'):
                    message += [
                        "Nb generated metatiles: {}".format(
                            self.count_metatiles.nb),
                        "Nb metatiles dropped: {}".format(
                            self.count_metatiles_dropped.nb),
                    ]
        else:
            message = ["The tile generation is finish"]
        if options.role != "master":
            message += [
                "Nb generated tiles: {}".format(self.count_tiles.nb),
                "Nb tiles dropped: {}".format(self.count_tiles_dropped.nb),
            ]
            if options.role in ('local', 'slave'):
                message += [
                    "Nb tiles stored: {}".format(
                        self.count_tiles_stored.nb),
                    "Nb tiles in error: {}".format(gene.error),
                    "Total time: {}".format(duration_format(
                        gene.duration)),
                ]
                if self.count_tiles_stored.nb != 0:
                    message.append("Total size: {}".format(
                        size_format(self.count_tiles_stored.size)))
                if self.count_tiles.nb != 0:
                    message.append("Time per tile: {:0.0f} ms".format(
                        (gene.duration / self.count_tiles.nb * 1000).seconds))
                if self.count_tiles_stored.nb != 0:
                    message.append("Size per tile: {:0.0f} o".format(
                        self.count_tiles_stored.size /
                        self.count_tiles_stored.nb))

        if not options.quiet and options.role in ('local', 'slave'):
            print("\n".join(message) + "\n")

    if self.cache_tilestore is not None and hasattr(
            self.cache_tilestore, 'connection'):
        self.cache_tilestore.connection.close()

    if options.role != 'hash' and options.time is None and 'sns' in gene.config:  # pragma: no cover
        if 'region' in gene.config['sns']:
            connection = sns.connect_to_region(
                gene.config['sns']['region'])
        else:
            connection = boto.connect_sns()
        sns_message = [message[0]]
        sns_message += [
            "Layer: {}".format(gene.layer['name'] if gene.
                               layer is not None else "(All layers)"),
            "Role: {}".format(options.role),
            "Host: {}".format(socket.getfqdn()),
            "Command: {}".format(' '.join([quote(arg) for arg in sys.argv])),
        ]
        sns_message += message[1:]
        connection.publish(
            gene.config['sns']['topic'],
            "\n".join(sns_message),
            "Tile generation ({layer!s} - {role!s})".format(
                **{
                    'role': options.role,
                    'layer': gene.layer['name'] if gene.
                    layer is not None else "All layers"
                }))
def __init__(self, config, kms):
    """Connect to SNS in the configured region and decrypt the topic ARN."""
    target_region = config["region"]
    self.conn = sns.connect_to_region(target_region)
    self.arn = kms.decrypt(config["arn"])
# Load config values specified above application.config.from_object(__name__) # Load configuration values from a file application.config.from_envvar('APP_CONFIG', silent=True) FLASK_DEBUG = 'True' if os.environ.get('FLASK_DEBUG') is None else os.environ.get('FLASK_DEBUG') # Connect to DynamoDB and get ref to Table ddb_conn = dynamodb2.connect_to_region(application.config['AWS_REGION']) ddb_table = Table(table_name=application.config['CONTACT_TABLE'], connection=ddb_conn) # Connect to SNS sns_conn = sns.connect_to_region(application.config['AWS_REGION']) class resultClass: system_size = 0 sys_type = 0 total_cost = 0 irr = 0 roi = 0 average_savings = 0 def __init__(self, system_size, sys_type, total_cost, irr, roi, average_savings): self.system_size = system_size self.sys_type = sys_type self.total_cost = total_cost self.irr = irr
def subscribe_sns(queue):
    """Subscribe the configured SNS topic to *queue*.

    Returns a ``(connection, subscription_arn)`` pair.
    """
    connection = sns.connect_to_region(CONFIG['region'])
    response = connection.subscribe_sqs_queue(CONFIG['sns_topic'], queue)
    result = response['SubscribeResponse']['SubscribeResult']
    return connection, result['SubscriptionArn']
def __init__(self):
    """Wire up the SQS notification queue and an SNS client in ap-northeast-1."""
    aws_region = 'ap-northeast-1'
    queue_conn = sqs.connect_to_region(aws_region)
    self.queue = queue_conn.get_queue(conf['NOTIFICATION_QUEUE_NAME'])
    self.sns_conn = sns.connect_to_region(aws_region)
def confirm_subscription_topic(topic, token):
    """Confirm a pending SNS subscription on *topic* using the emailed *token*."""
    connection = sns.connect_to_region(AWS_REGION)
    return connection.confirm_subscription(topic, token)
def main():
    """Command-line entry point: drive tile generation from an Amazon EC2 host.

    Depending on the flags it syncs geodata and application code to the host,
    deploys the database, benchmarks tile generation (``--time``), fills the
    SQS queue (master role) and/or starts background slave tile-generation
    processes, finally publishing a notification to an SNS topic when one is
    configured.  Python 2 code (``print`` statements).
    """
    parser = ArgumentParser(
        description='Used to generate the tiles from Amazon EC2, '
        'and get the SQS queue status',
        prog='./buildout/bin/generate_amazon')
    add_comon_options(parser)
    parser.add_argument('--deploy-config', default=None,
                        dest="deploy_config", metavar="FILE",
                        help='path to the deploy configuration file')
    parser.add_argument('--status', default=False, action="store_true",
                        help='display the SQS queue status and exit')
    # the --disable-* flags use store_false: the option value starts True
    # and is turned off either by the flag or by the ec2 config below
    parser.add_argument('--disable-geodata', default=True,
                        action="store_false", dest="geodata",
                        help='disable geodata synchronisation')
    parser.add_argument('--disable-code', default=True,
                        action="store_false", dest="deploy_code",
                        help='disable deploy application code')
    parser.add_argument('--disable-database', default=True,
                        action="store_false", dest="deploy_database",
                        help='disable deploy database')
    parser.add_argument('--disable-fillqueue', default=True,
                        action="store_false", dest="fill_queue",
                        help='disable queue filling')
    parser.add_argument('--disable-tilesgen', default=True,
                        action="store_false", dest="tiles_gen",
                        help='disable tile generation')
    parser.add_argument('--host', default=None,
                        help='The host used to generate tiles')
    parser.add_argument('--shutdown', default=False, action="store_true",
                        help='Shut done the remote host after the task.')
    options = parser.parse_args()
    gene = TileGeneration(options.config, options, layer_name=options.layer)

    if options.status:  # pragma: no cover
        status(options, gene)
        sys.exit(0)

    if 'ec2' not in gene.config:  # pragma: no cover
        print "EC2 not configured"
        sys.exit(1)

    if options.deploy_config is None:
        options.deploy_config = gene.config['ec2']['deploy_config']
    # a step is run only if neither the CLI flag nor the config disables it
    if options.geodata:
        options.geodata = not gene.config['ec2']['disable_geodata']
    if options.deploy_code:
        options.deploy_code = not gene.config['ec2']['disable_code']
    if options.deploy_database:
        options.deploy_database = not gene.config['ec2']['disable_database']
    if options.fill_queue:  # pragma: no cover
        options.fill_queue = not gene.config['ec2']['disable_fillqueue']
    if options.tiles_gen:  # pragma: no cover
        options.tiles_gen = not gene.config['ec2']['disable_tilesgen']

    # start aws
    if not options.host:
        # TODO not implemented yet
        host = aws_start(gene.config['ec2']['host_type'])  # pragma: no cover
    else:
        host = options.host

    if options.geodata and 'geodata_folder' in gene.config['ec2']:
        print "==== Sync geodata ===="
        ssh_options = ''
        if 'ssh_options' in gene.config['ec2']:  # pragma: no cover
            ssh_options = gene.config['ec2']['ssh_options']
        # sync geodata
        run_local([
            'rsync', '--delete', '-e', 'ssh ' + ssh_options, '-r',
            gene.config['ec2']['geodata_folder'],
            host + ':' + gene.config['ec2']['geodata_folder']
        ])

    if options.deploy_code:
        print "==== Sync and build code ===="
        error = gene.validate(gene.config['ec2'], 'ec2', 'code_folder',
                              required=True)
        if error:
            exit(1)  # pragma: no cover

        cmd = [
            'rsync',
            '--delete',
        ]
        if 'ssh_options' in gene.config['ec2']:  # pragma: no cover
            cmd += ['-e', 'ssh ' + gene.config['ec2']['ssh_options']]
            ssh_options = gene.config['ec2']['ssh_options']

        project_dir = gene.config['ec2']['code_folder']
        cmd += ['-r', '.', host + ':' + project_dir]
        run_local(cmd)

        for cmd in gene.config['ec2']['build_cmds']:
            run_remote(cmd, host, project_dir, gene)
        if 'apache_content' in gene.config[
                'ec2'] and 'apache_config' in gene.config['ec2']:
            run_remote(
                'echo %s > %s' % (gene.config['ec2']['apache_content'],
                                  gene.config['ec2']['apache_config']),
                host, project_dir, gene)
        run_remote('sudo apache2ctl graceful', host, project_dir, gene)

    # deploy
    if options.deploy_database:
        _deploy(gene, host)

    if options.deploy_code or options.deploy_database \
            or options.geodata:
        # TODO not implemented yet
        create_snapshot(host, gene)

    if options.time:
        # benchmark mode: run N remote processes, collect 'time:'/'size:'
        # lines from their stdout and print mean timings, then exit
        arguments = _get_arguments(options)
        arguments.extend(['--role', 'local'])
        arguments.extend(['--time', str(options.time)])

        project_dir = gene.config['ec2']['code_folder']
        processes = []
        for i in range(gene.config['ec2']['number_process']):
            processes.append(
                run_remote_process(
                    './buildout/bin/generate_tiles ' +
                    ' '.join([str(a) for a in arguments]), host, project_dir,
                    gene))

        tiles_size = []
        times = []
        for p in processes:
            results = p.communicate()
            if results[1] != '':  # pragma: no cover
                logger.debug('ERROR: %s' % results[1])
            # strip progress lines that end with a carriage return
            results = (re.sub(u'\n[^\n]*\r', u'\n', results[0]), )
            results = (re.sub(u'^[^\n]*\r', u'', results[0]), )
            for r in results[0].split('\n'):
                if r.startswith('time: '):
                    times.append(int(r.replace('time: ', '')))
                elif r.startswith('size: '):
                    tiles_size.append(int(r.replace('size: ', '')))

        if len(times) == 0:  # pragma: no cover
            logger.error("Not enough data")
            sys.exit(1)
        # NOTE(review): dividing by len(times)**2 (not len(times)) looks
        # deliberate here but is surprising for a mean — verify upstream.
        mean_time = reduce(lambda x, y: x + y,
                           [timedelta(microseconds=int(r)) for r in times],
                           timedelta()) / len(times)**2
        mean_time_ms = mean_time.seconds * 1000 + mean_time.microseconds / 1000.0

        mean_size = reduce(lambda x, y: x + y, [int(r) for r in tiles_size],
                           0) / len(tiles_size)
        mean_size_kb = mean_size / 1024.0

        print '==== Time results ===='
        print 'A tile is generated in: %0.3f [ms]' % mean_time_ms
        print 'Then mean generated tile size: %0.3f [kb]' % (mean_size_kb)
        print '''config:
    cost:
        tileonly_generation_time: %0.3f
        tile_generation_time: %0.3f
        metatile_generation_time: 0
        tile_size: %0.3f''' % (mean_time_ms, mean_time_ms, mean_size_kb)

        if options.shutdown:  # pragma: no cover
            run_remote('sudo shutdown 0', host, project_dir, gene)
        sys.exit(0)

    if options.fill_queue:  # pragma: no cover
        print "==== Till queue ===="
        # TODO test
        arguments = _get_arguments(options)
        arguments.extend(['--role', 'master'])
        project_dir = gene.config['ec2']['code_folder']
        run_remote(
            './buildout/bin/generate_tiles ' +
            ' '.join([str(a) for a in arguments]), host, project_dir, gene)

    if options.tiles_gen:  # pragma: no cover
        print "==== Generate tiles ===="
        # TODO test
        arguments = _get_arguments(options)
        arguments.extend(['--role', 'slave'])
        arguments.append("--daemonize")
        project_dir = gene.config['ec2']['code_folder']
        processes = []
        for i in range(gene.config['ec2']['number_process']):
            processes.append(
                run_remote_process(
                    './buildout/bin/generate_tiles ' +
                    ' '.join([str(a) for a in arguments]), host, project_dir,
                    gene))

        if options.shutdown:
            for p in processes:
                p.communicate()  # wait process end
        else:
            print 'Tile generation started in background'

        if options.shutdown:
            # NOTE(review): run_remote is called elsewhere with
            # (cmd, host, project_dir, gene); this call passes only cmd —
            # likely a latent bug, confirm run_remote's signature.
            run_remote('sudo shutdown 0')

        if 'sns' in gene.config:
            if 'region' in gene.config['sns']:
                connection = sns.connect_to_region(
                    gene.config['sns']['region'])
            else:
                connection = boto.connect_sns()
            connection.publish(
                gene.config['sns']['topic'], """The tile generation is finish
Host: %(host)s
Command: %(cmd)s""" % {
                    'host': socket.getfqdn(),
                    'cmd': ' '.join([quote(arg) for arg in sys.argv])
                }, "Tile generation controller")
theme = db.Column(db.String(30)) previewAccess = db.Column(db.String(10)) age = db.Column(db.Integer) class Blog(db.Model): __tablename__ = 'blogs' id = db.Column(db.Integer, primary_key=True, autoincrement=True) author = db.Column(db.String(50)) post = db.Column(db.String(140)) # Connect to DynamoDB and get ref to Table #ddb_conn = dynamodb2.connect_to_region(application.config['AWS_REGION']) #ddb_table = Table(table_name=application.config['STARTUP_SIGNUP_TABLE'],connection=ddb_conn) # Connect to SNS sns_conn = sns.connect_to_region(application.config['AWS_REGION']) @application.route('/') def welcome(): theme = application.config['THEME'] return flask.render_template('index.html', theme=theme, flask_debug=application.debug) @application.route('/signup', methods=['POST']) def signup(): signup_data = dict() for item in request.form: signup_data[item] = request.form[item] exists = User.query.filter_by(email=signup_data["email"]).first()
def make_connection(self, region_name):
    """Create and return a new SNS connection for *region_name*."""
    connection = sns.connect_to_region(region_name)
    return connection
def gene(self, options, gene, layer):
    """Build and run the tile-generation pipeline for one *layer*.

    Configures the tile stream source according to ``options.role``
    ('local', 'master', 'slave' or 'hash'), fetches tiles from a WMS server
    or renders them with Mapnik, filters/splits/stores them, then reports
    the counters and optionally publishes a summary to SNS.

    NOTE(review): the parameter ``gene`` shadows the method's own name;
    kept as-is (documentation-only pass).
    """
    count_metatiles = None
    count_metatiles_dropped = Count()
    count_tiles = None
    count_tiles_dropped = Count()

    if options.role == 'slave' or options.get_hash or options.get_bbox:
        gene.layer = gene.layers[layer]
    else:
        gene.set_layer(layer, options)

    if options.get_bbox:
        # --get-bbox: print the tile extent and exit without generating
        try:
            tilecoord = parse_tilecoord(options.get_bbox)
            print \
                "Tile bounds: [%i,%i,%i,%i]" % \
                gene.layer['grid_ref']['obj'].extent(tilecoord)
            exit()
        except ValueError as e:  # pragma: no cover
            exit(
                "Tile '%s' is not in the format 'z/x/y' or z/x/y:+n/+n\n%r" %
                (options.get_bbox, e))

    if options.get_hash:
        # --get-hash implies the 'hash' role and a single test tile
        options.role = 'hash'
        options.test = 1

    sqs_tilestore = None
    if options.role in ('master', 'slave'):
        # Create SQS queue
        sqs_tilestore = SQSTileStore(
            gene.get_sqs_queue())  # pragma: no cover

    cache_tilestore = None
    if options.role in ('local', 'slave'):
        cache_tilestore = gene.get_tilesstore(options.cache)

    meta = gene.layer['meta']
    if options.tiles:
        gene.set_store(TilesFileStore(options.tiles))
    elif options.role in ('local', 'master'):
        # Generate a stream of metatiles
        gene.init_tilecoords()
        gene.add_geom_filter()
    elif options.role == 'slave':
        # Get the metatiles from the SQS queue
        gene.set_store(sqs_tilestore)  # pragma: no cover
    elif options.role == 'hash':
        try:
            z, x, y = (int(v) for v in options.get_hash.split('/'))
            if meta:
                gene.set_tilecoords(
                    [TileCoord(z, x, y, gene.layer['meta_size'])])
            else:
                gene.set_tilecoords([TileCoord(z, x, y)])
        except ValueError as e:  # pragma: no cover
            exit("Tile '%s' is not in the format 'z/x/y'\n%r" %
                 (options.get_hash, e))

    # At this stage, the tilestream contains metatiles that intersect geometry
    gene.add_logger()

    count_metatiles = gene.counter()

    if options.role == 'master':  # pragma: no cover
        # Put the metatiles into the SQS queue
        gene.put(sqs_tilestore)
    elif options.role in ('local', 'slave', 'hash'):
        if gene.layer['type'] == 'wms':
            params = gene.layer['params'].copy()
            if 'STYLES' not in params:
                # one style entry per WMS sub-layer
                params['STYLES'] = ','.join(gene.layer['wmts_style']
                                            for l in gene.layer['layers'])
            if gene.layer['generate_salt']:
                # random SALT defeats any caching proxy between us and the WMS
                params['SALT'] = str(random.randint(0, sys.maxint))
            for dim in gene.layer['dimensions']:
                params[dim['name']] = dim['value']
            for dim in gene.options.dimensions:
                dim = dim.split('=')
                if len(dim) != 2:  # pragma: no cover
                    exit('the DIMENTIONS option should be like this '
                         'DATE=2013 VERSION=13.')
                params[dim[0]] = dim[1]

            # Get the metatile image from the WMS server
            gene.get(
                URLTileStore(
                    tilelayouts=(WMSTileLayout(
                        url=gene.layer['url'],
                        layers=','.join(gene.layer['layers']),
                        srs=gene.layer['grid_ref']['srs'],
                        format=gene.layer['mime_type'],
                        border=gene.layer['meta_buffer'] if meta else 0,
                        tilegrid=gene.get_grid()['obj'],
                        params=params,
                    ), ),
                    headers=gene.layer['headers'],
                ), "Get tile from WMS")
        elif gene.layer['type'] == 'mapnik':
            from tilecloud.store.mapnik_ import MapnikTileStore
            from tilecloud_chain.mapnik_ import MapnikDropActionTileStore

            grid = gene.get_grid()
            if gene.layer['output_format'] == 'grid':
                gene.get(
                    MapnikDropActionTileStore(
                        tilegrid=grid['obj'],
                        mapfile=gene.layer['mapfile'],
                        image_buffer=gene.layer['meta_buffer']
                        if meta else 0,
                        data_buffer=gene.layer['data_buffer'],
                        output_format=gene.layer['output_format'],
                        resolution=gene.layer['resolution'],
                        layers_fields=gene.layer['layers_fields'],
                        drop_empty_utfgrid=gene.layer['drop_empty_utfgrid'],
                        store=cache_tilestore,
                        queue_store=sqs_tilestore,
                        count=count_tiles_dropped,
                        proj4_literal=grid['proj4_literal'],
                    ), "Create Mapnik grid tile")
            else:
                gene.get(
                    MapnikTileStore(
                        tilegrid=grid['obj'],
                        mapfile=gene.layer['mapfile'],
                        image_buffer=gene.layer['meta_buffer']
                        if meta else 0,
                        data_buffer=gene.layer['data_buffer'],
                        output_format=gene.layer['output_format'],
                        proj4_literal=grid['proj4_literal'],
                    ), "Create Mapnik tile")

        def wrong_content_type_to_error(tile):
            # non-image responses (typically WMS XML errors) become tile errors
            if tile is not None and tile.content_type is not None \
                    and tile.content_type.find("image/") != 0:
                if tile.content_type.find(
                        "application/vnd.ogc.se_xml") == 0:
                    tile.error = "WMS server error: %s" % (
                        self._re_rm_xml_tag.sub('', tile.data))
                else:  # pragma: no cover
                    tile.error = "%s is not an image format, error: %s" % (
                        tile.content_type, tile.data)
            return tile

        gene.imap(wrong_content_type_to_error)

        # Handle errors
        gene.add_error_filters()

        if meta:
            if options.role == 'hash':
                gene.imap(HashLogger('empty_metatile_detection'))
            elif not options.near:
                # Discard tiles with certain content
                if 'empty_metatile_detection' in gene.layer:
                    empty_tile = gene.layer['empty_metatile_detection']
                    gene.imap(
                        HashDropper(
                            empty_tile['size'],
                            empty_tile['hash'],
                            store=cache_tilestore,
                            queue_store=sqs_tilestore,
                            count=count_metatiles_dropped,
                        ))

            def add_elapsed_togenerate(metatile):
                # track how many child tiles remain before the metatile can
                # be removed from the SQS queue (see decr_tile_in_metatile)
                if metatile is not None:
                    metatile.elapsed_togenerate = metatile.tilecoord.n**2
                    return True
                return False  # pragma: no cover

            gene.ifilter(add_elapsed_togenerate)

            # Split the metatile image into individual tiles
            gene.add_metatile_splitter()
            gene.imap(Logger(logger, logging.INFO, '%(tilecoord)s'))

            # Handle errors
            gene.add_error_filters()

        # NOTE(review): stored on self while the other counters are locals —
        # presumably read by the reporting code elsewhere in this class; verify.
        self.count_tiles = gene.counter()

        if 'pre_hash_post_process' in gene.layer:
            gene.process(gene.layer['pre_hash_post_process'])

        if options.role == 'hash':
            gene.imap(HashLogger('empty_tile_detection'))
        elif not options.near:
            # Discard tiles with certain content
            if 'empty_tile_detection' in gene.layer:
                empty_tile = gene.layer['empty_tile_detection']
                gene.imap(
                    HashDropper(
                        empty_tile['size'],
                        empty_tile['hash'],
                        store=cache_tilestore,
                        queue_store=sqs_tilestore,
                        count=count_tiles_dropped,
                    ))

        gene.process()

    if options.role in ('local', 'slave'):
        gene.add_error_filters()
        gene.ifilter(DropEmpty(gene))
        count_tiles = gene.counter(size=True)

        if options.time:
            def log_size(tile):
                sys.stdout.write('size: %i\n' % len(tile.data))
                return tile
            gene.imap(log_size)

        gene.put(cache_tilestore, "Store the tile")
    else:
        count_tiles = gene.counter(size=True)

    gene.add_error_filters()

    if options.generated_tiles_file:  # pragma: no cover
        generated_tiles_file = open(options.generated_tiles_file, 'a')

        def do(tile):
            generated_tiles_file.write('%s\n' % (tile.tilecoord, ))
            return tile
        gene.imap(do)

    if options.role == 'slave':  # pragma: no cover
        if meta:
            def decr_tile_in_metatile(tile):
                # delete the metatile from SQS once all its tiles are done
                tile.metatile.elapsed_togenerate -= 1
                if tile.metatile.elapsed_togenerate == 0:
                    sqs_tilestore.delete_one(tile.metatile)
                return True
            gene.ifilter(decr_tile_in_metatile)
        else:
            gene.delete(sqs_tilestore)

    if options.time is not None:
        # benchmark mode: measure the wall time for `options.time` tiles
        # (warm-up of the same length first), emit it as a 'time:' line
        class LogTime:
            n = 0
            t1 = None

            def __call__(self, tile):
                self.n += 1
                if self.n == options.time:
                    self.t1 = datetime.now()
                elif self.n == 2 * options.time:
                    t2 = datetime.now()
                    d = (t2 - self.t1) / options.time
                    sys.stdout.write(
                        'time: %i\n' %
                        ((d.days * 24 * 3600 + d.seconds) * 1000000 +
                         d.microseconds))
                return tile
        gene.imap(LogTime())

        gene.consume(options.time * 3)
    else:
        gene.consume()

        if not options.quiet and options.role in ('local', 'slave'):
            nb_tiles = count_tiles.nb + count_tiles_dropped.nb
            print """The tile generation of layer '%s' is finish
%sNb generated tiles: %i
Nb tiles dropped: %i
Nb tiles stored: %i
Nb error: %i
Total time: %s
Total size: %s
Time per tiles: %i ms
Size per tile: %i o
""" % \
                (
                    gene.layer['name'],
                    """Nb generated metatiles: %i
Nb metatiles dropped: %i
""" % (
                        count_metatiles.nb, count_metatiles_dropped.nb
                    ) if meta else '',
                    nb_tiles,
                    count_tiles_dropped.nb,
                    count_tiles.nb,
                    gene.error,
                    duration_format(gene.duration),
                    size_format(count_tiles.size),
                    (gene.duration / nb_tiles * 1000).seconds
                    if nb_tiles != 0 else 0,
                    count_tiles.size / count_tiles.nb
                    if count_tiles.nb != 0 else -1
                )

    if cache_tilestore is not None and hasattr(cache_tilestore,
                                               'connection'):
        cache_tilestore.connection.close()

    if options.role != 'hash' and options.time is None and 'sns' in gene.config:  # pragma: no cover
        if 'region' in gene.config['sns']:
            connection = sns.connect_to_region(
                gene.config['sns']['region'])
        else:
            connection = boto.connect_sns()
        # NOTE(review): nb_tiles is only bound in the not-quiet branch above;
        # with --quiet this would raise — confirm against upstream history.
        connection.publish(
            gene.config['sns']['topic'], """The tile generation is finish
Layer: %(layer)s
Role: %(role)s
Host: %(host)s
Command: %(cmd)s

%(meta)sNb generated tiles: %(nb_tiles)i
Nb tiles dropped: %(nb_tiles_dropped)i
Total time: %(duration)s [s]
Time per tiles: %(tile_duration)i [ms]""" % {
                'role': options.role,
                'layer': gene.layer['name'],
                'host': socket.getfqdn(),
                'cmd': ' '.join([quote(arg) for arg in sys.argv]),
                'meta': """Nb generated metatiles: %(nb_metatiles)i
Nb metatiles dropped: %(nb_metatiles_dropped)i
""" % {
                    'nb_metatiles': count_metatiles.nb,
                    'nb_metatiles_dropped': count_metatiles_dropped.nb,
                } if meta else '',
                'nb_tiles': nb_tiles if meta else count_metatiles.nb,
                'nb_tiles_dropped': count_tiles_dropped.nb
                if meta else count_metatiles_dropped.nb,
                'duration': duration_format(gene.duration),
                'tile_duration': (gene.duration / nb_tiles *
                                  1000).seconds if nb_tiles != 0 else 0,
            }, "Tile generation (%(layer)s - %(role)s)" % {
                'role': options.role,
                'layer': gene.layer['name']
            })
def __init__(self, region, topic):
    """Remember the target *region* and *topic* and open an SNS connection."""
    self.topic = topic
    self.region = region
    self._snsConn = sns.connect_to_region(region)
def publishsns(aws_access_key, aws_secret_access_key, region, topicArn, msg, subject):
    """Publish *msg* with *subject* to the SNS topic *topicArn* in *region*.

    :param aws_access_key: AWS access key id used for the connection
    :param aws_secret_access_key: AWS secret access key used for the connection
    :param region: AWS region name of the topic
    :param topicArn: ARN of the target SNS topic
    :param msg: message body to publish
    :param subject: subject line for the notification

    Bug fix: the original ignored its two credential parameters and instead
    referenced module-level ``AWS_ACCESS_KEY`` / ``AWS_SECRET_ACCESS_KEY``
    globals (a NameError when those are not defined); the parameters are
    now actually used.
    """
    conn = connect_to_region(
        region,
        aws_access_key_id=aws_access_key,
        aws_secret_access_key=aws_secret_access_key)
    conn.publish(message=msg, subject=subject, target_arn=topicArn)
def main():
    """Command-line entry point: drive tile generation from an Amazon EC2 host.

    Depending on the flags it syncs geodata and application code to the host,
    deploys the database, benchmarks tile generation (``--time``), fills the
    SQS queue (master role) and/or starts background slave tile-generation
    processes, finally publishing a notification to an SNS topic when one is
    configured.  Python 2 code (``print`` statements).
    """
    parser = ArgumentParser(
        description='Used to generate the tiles from Amazon EC2, '
        'and get the SQS queue status',
        prog='./buildout/bin/generate_amazon'
    )
    add_comon_options(parser)
    parser.add_argument(
        '--deploy-config', default=None, dest="deploy_config",
        metavar="FILE",
        help='path to the deploy configuration file'
    )
    parser.add_argument(
        '--status', default=False, action="store_true",
        help='display the SQS queue status and exit'
    )
    # the --disable-* flags use store_false: the option value starts True
    # and is turned off either by the flag or by the ec2 config below
    parser.add_argument(
        '--disable-geodata', default=True, action="store_false",
        dest="geodata",
        help='disable geodata synchronisation'
    )
    parser.add_argument(
        '--disable-code', default=True, action="store_false",
        dest="deploy_code",
        help='disable deploy application code'
    )
    parser.add_argument(
        '--disable-database', default=True, action="store_false",
        dest="deploy_database",
        help='disable deploy database'
    )
    parser.add_argument(
        '--disable-fillqueue', default=True, action="store_false",
        dest="fill_queue",
        help='disable queue filling'
    )
    parser.add_argument(
        '--disable-tilesgen', default=True, action="store_false",
        dest="tiles_gen",
        help='disable tile generation'
    )
    parser.add_argument(
        '--host', default=None,
        help='The host used to generate tiles'
    )
    parser.add_argument(
        '--shutdown', default=False, action="store_true",
        help='Shut done the remote host after the task.'
    )
    options = parser.parse_args()

    gene = TileGeneration(options.config, options, layer_name=options.layer)

    if options.status:  # pragma: no cover
        status(options, gene)
        sys.exit(0)

    if 'ec2' not in gene.config:  # pragma: no cover
        print "EC2 not configured"
        sys.exit(1)

    if options.deploy_config is None:
        options.deploy_config = gene.config['ec2']['deploy_config']
    # a step is run only if neither the CLI flag nor the config disables it
    if options.geodata:
        options.geodata = not gene.config['ec2']['disable_geodata']
    if options.deploy_code:
        options.deploy_code = not gene.config['ec2']['disable_code']
    if options.deploy_database:
        options.deploy_database = not gene.config['ec2']['disable_database']
    if options.fill_queue:  # pragma: no cover
        options.fill_queue = not gene.config['ec2']['disable_fillqueue']
    if options.tiles_gen:  # pragma: no cover
        options.tiles_gen = not gene.config['ec2']['disable_tilesgen']

    # start aws
    if not options.host:
        # TODO not implemented yet
        host = aws_start(gene.config['ec2']['host_type'])  # pragma: no cover
    else:
        host = options.host

    if options.geodata and 'geodata_folder' in gene.config['ec2']:
        print "==== Sync geodata ===="
        ssh_options = ''
        if 'ssh_options' in gene.config['ec2']:  # pragma: no cover
            ssh_options = gene.config['ec2']['ssh_options']
        # sync geodata
        run_local([
            'rsync', '--delete', '-e', 'ssh ' + ssh_options,
            '-r', gene.config['ec2']['geodata_folder'],
            host + ':' + gene.config['ec2']['geodata_folder']
        ])

    if options.deploy_code:
        print "==== Sync and build code ===="
        error = gene.validate(gene.config['ec2'], 'ec2', 'code_folder',
                              required=True)
        if error:
            exit(1)  # pragma: no cover

        cmd = ['rsync', '--delete', ]
        if 'ssh_options' in gene.config['ec2']:  # pragma: no cover
            cmd += ['-e', 'ssh ' + gene.config['ec2']['ssh_options']]
            ssh_options = gene.config['ec2']['ssh_options']

        project_dir = gene.config['ec2']['code_folder']
        cmd += ['-r', '.', host + ':' + project_dir]
        run_local(cmd)

        for cmd in gene.config['ec2']['build_cmds']:
            run_remote(cmd, host, project_dir, gene)
        if 'apache_content' in gene.config['ec2'] and \
                'apache_config' in gene.config['ec2']:
            run_remote(
                'echo %s > %s' % (
                    gene.config['ec2']['apache_content'],
                    gene.config['ec2']['apache_config']
                ), host, project_dir, gene
            )
        run_remote('sudo apache2ctl graceful', host, project_dir, gene)

    # deploy
    if options.deploy_database:
        _deploy(gene, host)

    if options.deploy_code or options.deploy_database \
            or options.geodata:
        # TODO not implemented yet
        create_snapshot(host, gene)

    if options.time:
        # benchmark mode: run N remote processes, collect 'time:'/'size:'
        # lines from their stdout and print mean timings, then exit
        arguments = _get_arguments(options)
        arguments.extend(['--role', 'local'])
        arguments.extend(['--time', str(options.time)])

        project_dir = gene.config['ec2']['code_folder']
        processes = []
        for i in range(gene.config['ec2']['number_process']):
            processes.append(
                run_remote_process(
                    './buildout/bin/generate_tiles ' +
                    ' '.join([str(a) for a in arguments]),
                    host, project_dir, gene
                )
            )

        tiles_size = []
        times = []
        for p in processes:
            results = p.communicate()
            if results[1] != '':  # pragma: no cover
                logger.debug('ERROR: %s' % results[1])
            # strip progress lines that end with a carriage return
            results = (re.sub(u'\n[^\n]*\r', u'\n', results[0]), )
            results = (re.sub(u'^[^\n]*\r', u'', results[0]), )
            for r in results[0].split('\n'):
                if r.startswith('time: '):
                    times.append(int(r.replace('time: ', '')))
                elif r.startswith('size: '):
                    tiles_size.append(int(r.replace('size: ', '')))

        if len(times) == 0:  # pragma: no cover
            logger.error("Not enough data")
            sys.exit(1)
        # NOTE(review): dividing by len(times)**2 (not len(times)) looks
        # deliberate here but is surprising for a mean — verify upstream.
        mean_time = reduce(
            lambda x, y: x + y,
            [timedelta(microseconds=int(r)) for r in times],
            timedelta()
        ) / len(times) ** 2
        mean_time_ms = mean_time.seconds * 1000 + mean_time.microseconds / 1000.0

        mean_size = reduce(lambda x, y: x + y,
                           [int(r) for r in tiles_size], 0) / len(tiles_size)
        mean_size_kb = mean_size / 1024.0

        print '==== Time results ===='
        print 'A tile is generated in: %0.3f [ms]' % mean_time_ms
        print 'Then mean generated tile size: %0.3f [kb]' % (mean_size_kb)
        print '''config:
    cost:
        tileonly_generation_time: %0.3f
        tile_generation_time: %0.3f
        metatile_generation_time: 0
        tile_size: %0.3f''' % (mean_time_ms, mean_time_ms, mean_size_kb)

        if options.shutdown:  # pragma: no cover
            run_remote('sudo shutdown 0', host, project_dir, gene)
        sys.exit(0)

    if options.fill_queue:  # pragma: no cover
        print "==== Till queue ===="
        # TODO test
        arguments = _get_arguments(options)
        arguments.extend(['--role', 'master'])
        project_dir = gene.config['ec2']['code_folder']
        run_remote(
            './buildout/bin/generate_tiles ' +
            ' '.join([str(a) for a in arguments]), host, project_dir, gene
        )

    if options.tiles_gen:  # pragma: no cover
        print "==== Generate tiles ===="
        # TODO test
        arguments = _get_arguments(options)
        arguments.extend(['--role', 'slave'])
        arguments.append("--daemonize")
        project_dir = gene.config['ec2']['code_folder']
        processes = []
        for i in range(gene.config['ec2']['number_process']):
            processes.append(
                run_remote_process(
                    './buildout/bin/generate_tiles ' +
                    ' '.join([str(a) for a in arguments]),
                    host, project_dir, gene)
            )

        if options.shutdown:
            for p in processes:
                p.communicate()  # wait process end
        else:
            print 'Tile generation started in background'

        if options.shutdown:
            # NOTE(review): run_remote is called elsewhere with
            # (cmd, host, project_dir, gene); this call passes only cmd —
            # likely a latent bug, confirm run_remote's signature.
            run_remote('sudo shutdown 0')

        if 'sns' in gene.config:
            if 'region' in gene.config['sns']:
                connection = sns.connect_to_region(gene.config['sns']['region'])
            else:
                connection = boto.connect_sns()
            connection.publish(
                gene.config['sns']['topic'],
                """The tile generation is finish
Host: %(host)s
Command: %(cmd)s""" % {
                    'host': socket.getfqdn(),
                    'cmd': ' '.join([quote(arg) for arg in sys.argv])
                },
                "Tile generation controller")
def snsAlert(access_key, secret_key, topic, message, subject, region='us-west-2'):
    """Publish an alert *message* with *subject* to the given SNS *topic*.

    :param access_key: AWS access key id
    :param secret_key: AWS secret access key
    :param region: AWS region of the topic (defaults to us-west-2)
    """
    from boto import sns

    connection = sns.connect_to_region(
        region,
        aws_access_key_id=access_key,
        aws_secret_access_key=secret_key,
    )
    connection.publish(topic, message, subject)