def launch_deployment_host(modeladmin, request, queryset):
    """Admin action: for each selected deployment host, publish a
    launch_multideployer message and boot an EC2 instance to serve it.
    """
    exchange = Exchange("amq.direct", type="direct", durable=True)
    connection = DjangoBrokerConnection()
    producer = connection.Producer(
        routing_key="launch_multideployer", exchange=exchange)
    ec2 = connect_ec2(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_KEY)
    for host in queryset:
        # Queue the launch message for this host before starting hardware.
        producer.publish({"request_id": host.id})
        host.instance_id = "launching..."
        if host.name is None or host.name == '':
            # Give unnamed hosts a predictable default name.
            host.name = 'dephost_%s' % host.id
        host.save()
        # Boot a fresh instance from the multideployer image to consume
        # the message published above.
        image = ec2.get_image(settings.MULTIDEPLOYER_AMI_ID)
        reservation = image.run(
            subnet_id=settings.VPC_SUBNET_ID,
            placement='us-east-1b',
            key_name='otp-dev',
            instance_type='m2.xlarge')
        # Brief pause so the new instance exists before we tag it.
        time.sleep(5)
        for instance in reservation.instances:
            instance.add_tag("Name", host.name)
def accept_instance_request(modeladmin, request, queryset):
    """Admin action: publish a create_instance message for each selected
    request, mark them all accepted, and make sure a graph-builder EC2
    instance is available to do the work.
    """
    exchange = Exchange("amq.direct", type="direct", durable=True)
    connection = DjangoBrokerConnection()
    producer = connection.Producer(
        routing_key="create_instance", exchange=exchange)
    for irequest in queryset:
        # NOTE(review): only requests whose state is NOT 'approved' get a
        # message published — confirm this guard is intentional.
        if irequest.state == 'approved':
            continue
        files = [gtfsfile.s3_key
                 for gtfsfile in irequest.gtfsfile_set.all()]
        extra_props = [gtfsfile.extra_properties
                       for gtfsfile in irequest.gtfsfile_set.all()]
        producer.publish({
            "files": files,
            "extra_properties": extra_props,
            "request_id": irequest.id,
            "fare_factory": irequest.fare_factory,
        })
    if hasattr(queryset, 'update'):
        # Real querysets: one bulk UPDATE statement.
        queryset.update(state='accepted', decision_date=datetime.now())
    else:
        # Plain iterables of instances: save each one individually.
        for irequest in queryset:
            irequest.state = "accepted"
            irequest.decision_date = datetime.now()
            irequest.save()
    # Launch a graph-builder EC2 instance if none is running yet.
    check_for_running_instance(settings.GRAPH_BUILDER_AMI_ID)
def deploy_build_to_host(build, host):
    """Publish a deploy_graph_multi message asking *host* to deploy *build*."""
    exchange = Exchange("amq.direct", type="direct", durable=True)
    connection = DjangoBrokerConnection()
    producer = connection.Producer(
        routing_key="deploy_graph_multi", exchange=exchange)
    payload = {
        "request_id": build.id,
        "instance_id": host.instance_id,
        "graph_key": build.graph_key,
    }
    producer.publish(payload)
def save(self, force_insert=False, force_update=False, *args, **kwargs):
    """Persist this request; if its deployment host changed since the last
    save, also publish a deploy_graph_multi message so the new host
    deploys this request's graph.

    Fix: extra positional/keyword arguments (e.g. Django's ``using`` and
    ``update_fields``) are now forwarded to ``Model.save`` instead of being
    silently dropped, keeping the override compatible with all call sites.
    """
    if self.deployment_host != self.__original_dephost and self.deployment_host is not None:
        # Host changed: tell the deployer worker to push this graph to the
        # newly assigned host.
        exchange = Exchange("amq.direct", type="direct", durable=True)
        conn = DjangoBrokerConnection()
        publisher = conn.Producer(routing_key="deploy_graph_multi", exchange=exchange)
        publisher.publish({
            "request_id": self.id,
            "instance_id": self.deployment_host.instance_id,
            "graph_key": self.graph_key,
        })
    super(InstanceRequest, self).save(force_insert, force_update, *args, **kwargs)
    # Remember the current host so the next save() can detect a change.
    self.__original_dephost = self.deployment_host
def build_deployment_from_config(config_txt):
    """Create (or reuse) the ManagedDeployment for the metro described in
    *config_txt* (a JSON string), record a new GraphBuild for it, and
    publish a process_gtfs message.  Returns a short status string.
    """
    config = json.loads(config_txt)
    if 'metroId' not in config:
        return "no metroId provided"
    metro_name = config.get('metro', "N/A")
    response = 'Metro #%s (%s) ' % (config['metroId'], metro_name)
    source = 'metro-%s' % config['metroId']
    # Reuse an existing deployment record for this metro if one exists.
    try:
        man_dep = ManagedDeployment.objects.get(source=source)
        response += " has existing record."
    except ManagedDeployment.DoesNotExist:
        group = DeploymentGroup.objects.get(name="otpna")
        man_dep = ManagedDeployment(source=source, group=group)
        response += " has no record; created."
    man_dep.description = metro_name
    man_dep.save()
    build = GraphBuild(deployment=man_dep,
                       osm_key=man_dep.last_osm_key,
                       config=config_txt)
    build.save()
    # Hand the new build off to the GTFS-processing worker.
    exchange = Exchange("amq.direct", type="direct", durable=True)
    conn = DjangoBrokerConnection()
    publisher = conn.Producer(routing_key="process_gtfs", exchange=exchange)
    publisher.publish({'id': build.id, 'config': config_txt})
    response += ' Published process_gtfs message.'
    return response
def finalize_request(request):
    """Submit the current user's instance request.

    Files with a transload URL are sent to the transload worker first
    (state 'pre_transload'); otherwise the request goes straight to
    validation (state 'submitted') and a validator EC2 instance is
    started if needed.

    NOTE: ``locals()`` is passed as the template context at the end, so
    every local variable name in this function is part of the template's
    contract — do not rename locals here.
    """
    request_id = request.POST['request_id']
    irequest = InstanceRequest.objects.get(id=request_id)
    # Only the owner of the request may finalize it.
    if irequest.user != request.user:
        return redirect("/")
    exchange = Exchange("amq.direct", type="direct", durable=True)
    conn = DjangoBrokerConnection()
    publisher = conn.Producer(routing_key="transload", exchange=exchange)
    transloading = False
    s3_keys = []
    to_transload = []
    # Split the request's GTFS files: remote URLs must be transloaded,
    # already-uploaded files are referenced by their S3 key.
    for gtfs_file in irequest.gtfsfile_set.all():
        if gtfs_file.transload_url:
            transloading = True
            to_transload.append(gtfs_file)
        else:
            s3_keys.append(gtfs_file.s3_key)
    if transloading:
        irequest.state = 'pre_transload'
        irequest.save()
        # One transload message per remote file.
        for gtfs_file in to_transload:
            publisher.publish({
                "transload": gtfs_file.transload_url,
                "gtfs_file_id": gtfs_file.id
            })
    else:
        irequest.state = 'submitted'
        irequest.save()
        publisher = conn.Producer(routing_key="validate_request", exchange=exchange)
        publisher.publish({"files": s3_keys, "request_id": irequest.id})
        # start validator instance, if needed
        check_for_running_instance(settings.VALIDATOR_AMI_ID)
    publisher.close()
    # NOTE(review): django.shortcuts.render_to_response takes the template
    # name first — this call passes ``request`` first, so it presumably is
    # a project-local helper (or should be ``render``); confirm.
    return render_to_response(request, 'request_submitted.html', locals())
def rebuild_instance_request(modeladmin, request, queryset):
    """Admin action: republish each selected request for a graph rebuild,
    mark them all accepted, and ensure a graph builder is running.
    """
    exchange = Exchange("amq.direct", type="direct", durable=True)
    connection = DjangoBrokerConnection()
    producer = connection.Producer(
        routing_key="rebuild_graph", exchange=exchange)
    for irequest in queryset:
        producer.publish({
            "request_id": irequest.id,
            "data_key": irequest.data_key,
        })
    if hasattr(queryset, 'update'):
        # Real querysets: one bulk UPDATE statement.
        queryset.update(state='accepted', decision_date=datetime.now())
    else:
        # Plain iterables of instances: save each one individually.
        for irequest in queryset:
            irequest.state = "accepted"
            irequest.decision_date = datetime.now()
            irequest.save()
    # Launch a graph-builder EC2 instance if none is running yet.
    check_for_running_instance(settings.GRAPH_BUILDER_AMI_ID)
# Worker entry point: consume proxy-lifecycle messages from the shared
# amq.direct exchange and dispatch each one to the handler function in
# ``handlers`` named after its routing key.
exchange = Exchange("amq.direct", type="direct", durable=True)
queues = [
    Queue(key, exchange=exchange, routing_key=key)
    for key in ("setup_proxy", "init_proxy_multi", "register_proxy_multi")
]


def handle(conn, body, message):
    """Route *body* to the handler matching the message's routing key,
    then acknowledge the message."""
    key = message.delivery_info['routing_key']
    getattr(handlers, key)(conn, body)
    message.ack()


with DjangoBrokerConnection() as conn:

    def _on_message(body, message):
        # Close over the open connection so handlers can use it.
        handle(conn, body, message)

    with conn.Consumer(queues, callbacks=[_on_message]) as consumer:
        # Process messages and handle events on all channels, forever.
        while True:
            conn.drain_events()