def build_new_package(request):
    """Validate a package release request and queue a build job.

    Reads the package/branch/version details from the query string,
    validates them, and either enqueues a push-package job or returns a
    rendered error modal. Returns a JSON string in both cases.
    """
    site_name = request.GET.get('site_name', '')
    service_name = request.GET.get('service_name', '')
    package = Package.get_sm_package_by_name(request.GET['name'])
    branch = request.GET.get('branch', '')
    # The next version number is a combination of the actual version
    # number and the name of the actual branch
    next_version = request.GET.get('next_version', '') + '-' + branch
    remote = package.get_github_info()['ssh_url']

    errors = validate_package_release(package, branch, next_version)
    if errors:
        # Validation failed: render the error modal and bail out early.
        msg = "There were errors attempting to build new package %s" % (package.name)
        html = render('doula:templates/packages/build_new_package_modal_error.html',
                      {'msg': msg, 'errors': errors})
        return dumps({'success': False, 'msg': msg, 'html': html})

    job_dict = enqueue_push_package(request.user['username'], site_name,
                                    service_name, package, remote, branch,
                                    next_version)
    return dumps({'success': True, 'job': job_dict})
def deploy_application(request):
    """Deploy the application named in the POST body to its site.

    Expects POST params 'site' and 'application'. Returns a JSON string:
    {'success': True, 'app': ...} on success, or
    {'success': False, 'msg': ...} when the site/application lookup fails.
    """
    try:
        dao = SiteDAO()
        site = dao.get_site(request.POST['site'])
        app = site.applications[request.POST['application']]
        app.deploy_application(site)
        return dumps({'success': True, 'app': app})
    except KeyError:
        # Bug fix: the original format string only had "{0}" while two
        # values were passed to .format(), silently dropping the
        # application name. Mirror tag_application()'s two-placeholder form.
        # Also use .get() here: if the KeyError came from a missing POST
        # key, re-indexing request.POST would raise again out of the
        # except block instead of returning the error payload.
        msg = 'Unable to deploy application under "{0}" and "{1}"'
        msg = msg.format(request.POST.get('site', ''),
                         request.POST.get('application', ''))
        return dumps({'success': False, 'msg': msg})
def tag_application(request):
    """Apply a tag (with commit message) to the application named in the POST body.

    Returns a JSON string with 'success' and either the tagged app or an
    error message when the site/application lookup fails.
    """
    try:
        site = SiteDAO().get_site(request.POST['site'])
        application = site.applications[request.POST['application']]
        application.tag(request.POST['tag'], request.POST['msg'])
        return dumps({'success': True, 'app': application})
    except KeyError:
        template = 'Unable to tag site and application under "{0}" and "{1}"'
        return dumps({
            'success': False,
            'msg': template.format(request.POST['site'],
                                   request.POST['application']),
        })
def send_notification(job_dict, exception=None):
    """
    We only send out notifications for jobs initiated by users which are
    push to cheese prism, cycle services and release service.

    On 'complete' jobs email_success() is sent; on 'failed' jobs
    email_fail() is sent with the triggering exception. Any error while
    notifying is itself emailed rather than raised.
    """
    emailable_jobs = ['build_new_package', 'cycle_service', 'release_service']
    try:
        if job_dict and job_dict['job_type'] in emailable_jobs:
            email_list = build_email_list(job_dict)
            if email_list:
                if job_dict['status'] == 'complete':
                    email_success(email_list, job_dict)
                elif job_dict['status'] == 'failed':
                    email_fail(email_list, job_dict, exception)
    except Exception as e:
        # Error trying to notify the user: alert the admins instead.
        # Bug fix: this branch was left as scrubbed residue
        # ("'Error notifying user: '******'*****@*****.**']"), which is a
        # syntax error. Reconstructed minimally below.
        # TODO(review): the admin recipient address was redacted from
        # source -- restore the real address (or read it from config).
        subject = 'Error notifying user: ' + str(e)
        email_list = ['admin@example.com']
        body = dumps(job_dict) + "\n\n<br /><br />"
        body += traceback.format_exc()
        email(subject, email_list, body)
def update_release_for_service(self, service):
    """
    Update the release for a specific service, update all the release keys.

    Pulls every release for *service* (grouped by site) and writes each
    one to redis under three kinds of keys: by release number (when the
    release has one), by date, and into a sorted set of all releases for
    the service scored by epoch date.
    """
    print 'PULLING RELEASES FOR THE SERVICE: ' + service
    # NOTE(review): assumed shape is {site_name: [release_dict, ...]};
    # confirm against pull_releases_for_service.
    releases_and_branches = pull_releases_for_service(service)
    for site, releases in releases_and_branches.iteritems():
        for release in releases:
            release_as_json = dumps(release)
            # Substitutions used by key_val() to build each redis key.
            subs = {
                "site": site,
                "service": service,
                "date": release["date_as_epoch"],
                "release_number": release['release_number']
            }
            # Only index by number when the release actually has one.
            if release["release_number"]:
                release_by_number_key = key_val("release_by_number", subs)
                self.redis.set(release_by_number_key, release_as_json)
            release_by_date_key = key_val("release_by_date", subs)
            self.redis.set(release_by_date_key, release_as_json)
            # Add the release to the list of releases.
            # NOTE(review): zadd(key, member, score) is the pre-3.0
            # redis-py calling convention -- verify against the pinned
            # redis client version before upgrading.
            releases_key = key_val("releases_for_service", subs)
            self.redis.zadd(releases_key, release_as_json, release["date_as_epoch"])
def show_site(request):
    """Render the detail page for a single site; 404 when it is unknown."""
    name = request.matchdict['site']
    site = SiteDAO().get_site(name)
    if site:
        return {'site': site, 'site_json': dumps(site)}
    raise HTTPNotFound('Unable to find site "{0}"'.format(name))
def load_config(event):
    """ Load the Service config settings.

    Runs at application startup: loads the settings into Config, mirrors
    them into redis under 'doula:settings', then kicks off an immediate
    self-update and the periodic task scheduler. Statement order matters.
    """
    Config.load_config(event.app.registry.settings)
    Redis.get_instance().set('doula:settings', dumps(event.app.registry.settings))
    # When the service starts we'll make sure Doula updates itself by
    # pulling immediately, before scheduled runs begin.
    updatedoula(None)
    start_task_scheduling()
def get_doula_admins():
    """Return the list of Doula admins, reading the redis cache first.

    On a cache miss the admins are pulled fresh and written back to
    redis under 'doula.admins'.
    """
    redis = Redis.get_instance()
    cached = redis.get("doula.admins")
    if cached:
        return json.loads(cached)
    admins = pull_doula_admins()
    redis.set('doula.admins', dumps(admins))
    return admins
def update_site_and_service_models(self):
    """
    Pull all the site data from the Bambinos. Ask the ModelFactory
    to build the model objects and finally save to redis.

    All writes are batched on a redis pipeline and sent in a single
    execute() at the end, so the update is applied as one round trip.
    """
    pipeline = self.redis.pipeline()
    all_registered_sites = self.get_all_registered_sites()
    all_sites = self.mf.pull_and_build_sites(all_registered_sites)
    for name, site in all_sites.iteritems():
        # Save the entire site and it's services
        pipeline.sadd('doula.sites', site.name)
        pipeline.set('doula.site:%s' % site.name, dumps(site))
        # Save the individual service
        for service_name, service in site.services.iteritems():
            vals = (site.name, service.name)
            # save the individual service as json
            pipeline.set('doula.service:%s:%s' % vals, dumps(service))
            # save the services as a set of the site
            pipeline.sadd('doula.site.services:%s' % site.name, service.name)
    # Flush every queued command at once.
    pipeline.execute()
def site(request):
    """Render the site page: the site model plus, for every service, a
    diff between the service and its last production release."""
    dal = DoulaDAL()
    current_site = dal.find_site_by_name(request.matchdict['site_name'])

    # diffs for all services,
    # need to diff the service's last prod release
    diffs = {}
    for service in current_site.services.itervalues():
        last_release = find_last_production_release(current_site, service)
        diffs[service.name_url] = last_release.diff_service_and_release(service)

    return {
        'site': current_site,
        'user': request.user,
        'site_json': dumps(current_site),
        'token': Config.get('token'),
        'config': Config,
        'diffs': diffs,
    }
def stringify(obj):
    """Serialize *obj* to its JSON string form via the shared dumps helper."""
    return dumps(obj)