Example No. 1
    def handle(self, *args, **options):

        root_dir = settings.FILEPATH
        git_snapshot = gitinfo.get_project_snapshot(root_dir, submodules=True)
        git_snapshot['diff_url'] = options.get('url', None)
        deploy = HqDeploy(
            date=datetime.utcnow(),
            user=options['user'],
            environment=options['environment'],
            code_snapshot=git_snapshot,
        )
        deploy.save()

        #  reset PillowTop errors in the hope that a fix has been deployed
        rows_updated = PillowError.bulk_reset_attempts(datetime.utcnow())
        if rows_updated:
            print "\n---------------- Pillow Errors Reset ----------------\n" \
                  "{} pillow errors queued for retry\n".format(rows_updated)

        if options['mail_admins']:
            snapshot_table = render_to_string('hqadmin/partials/project_snapshot.html', dictionary={'snapshot': git_snapshot})
            message = "Deployed by %s, cheers!" % options['user']
            snapshot_body = "<html><head><title>Deploy Snapshot</title></head><body><h2>%s</h2>%s</body></html>" % (message, snapshot_table)

            call_command('mail_admins', snapshot_body, **{'subject': 'Deploy successful', 'html': True})
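
These handle() bodies come from Django management commands; the surrounding command class and its argument parsing are not shown in any of the examples. As a rough, hedged sketch only (the command name and flag spellings below are illustrative, not from the source; only the option keys mirror what the handle() methods read), the wrapper might look like this:

# Hypothetical wrapper -- a sketch, not the actual commcare-hq command class.
from django.core.management.base import BaseCommand


class Command(BaseCommand):
    help = "Record a successful deploy (illustrative)"

    def add_arguments(self, parser):
        parser.add_argument('--user', required=True)
        parser.add_argument('--environment', default='production')
        parser.add_argument('--url', default=None,
                            help='GitHub compare URL for the deploy diff')
        parser.add_argument('--mail_admins', action='store_true', default=False)

    def handle(self, *args, **options):
        pass  # body as in the examples above

Invoked programmatically this would look something like call_command('record_deploy_success', user='jdoe', environment='staging', mail_admins=True), where the command name is again hypothetical.
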
Example No. 2
    def handle(self, *args, **options):
        deploy = HqDeploy(
            date=datetime.utcnow(),
            user=options['user']
        )
        deploy.save()
Example No. 3
    def handle(self, *args, **options):

        root_dir = settings.FILEPATH
        git_snapshot = gitinfo.get_project_snapshot(root_dir, submodules=True)
        compare_url = git_snapshot['diff_url'] = options.get('url', None)
        deploy = HqDeploy(
            date=datetime.utcnow(),
            user=options['user'],
            environment=options['environment'],
            code_snapshot=git_snapshot,
        )
        deploy.save()

        #  reset PillowTop errors in the hope that a fix has been deployed
        rows_updated = PillowError.bulk_reset_attempts(datetime.utcnow())
        if rows_updated:
            print "\n---------------- Pillow Errors Reset ----------------\n" \
                  "{} pillow errors queued for retry\n".format(rows_updated)

        deploy_notification_text = (
            "CommCareHQ has been successfully deployed to *{}* by *{}*. "
            "Find the diff {{diff_link}}".format(
                options['environment'],
                options['user'],
            )
        )
        if hasattr(settings, 'MIA_THE_DEPLOY_BOT_API'):
            link = diff_link(STYLE_SLACK, compare_url)
            requests.post(settings.MIA_THE_DEPLOY_BOT_API, data=json.dumps({
                "username": "******",
                "text": deploy_notification_text.format(diff_link=link),
            }))

        if settings.DATADOG_API_KEY:
            tags = ['environment:{}'.format(options['environment'])]
            link = diff_link(STYLE_MARKDOWN, compare_url)
            datadog_api.Event.create(
                title="Deploy Success",
                text=deploy_notification_text.format(diff_link=link),
                tags=tags,
                alert_type="success"
            )

            print "\n=============================================================\n" \
                  "Congratulations! Deploy Complete.\n\n" \
                  "Don't forget to keep an eye on the deploy dashboard to " \
                  "make sure everything is running smoothly.\n\n" \
                  "https://p.datadoghq.com/sb/5c4af2ac8-1f739e93ef" \
                  "\n=============================================================\n"

        if options['mail_admins']:
            message_body = get_deploy_email_message_body(
                environment=options['environment'], user=options['user'],
                compare_url=compare_url)
            call_command('mail_admins', message_body, **{'subject': 'Deploy successful', 'html': True})
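
The diff_link, dashboard_link and integration_tests_link helpers (together with the STYLE_SLACK and STYLE_MARKDOWN constants) are imported from elsewhere in the codebase and are never shown in these excerpts. A minimal sketch of what they might do, assuming they simply wrap a URL in the channel-appropriate link syntax and that the labels match the surrounding message text ("Find the diff here", "Monitor the dashboard", "Check the integration tests"):

# Hedged sketch -- the real helpers are not shown in the source excerpts.
STYLE_SLACK = 'slack'
STYLE_MARKDOWN = 'markdown'


def _link(style, url, label):
    if not url:
        return label  # no compare URL was passed to the command
    if style == STYLE_SLACK:
        return '<{}|{}>'.format(url, label)   # Slack mrkdwn link syntax
    return '[{}]({})'.format(label, url)      # Markdown link syntax


def diff_link(style, url):
    return _link(style, url, 'here')


def dashboard_link(style, url):
    return _link(style, url, 'dashboard')


def integration_tests_link(style, url):
    return _link(style, url, 'tests')

Note that the last example in this collection calls diff_link(compare_url) with no style argument, so the helper's signature evidently changed over time.
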
Example No. 4
    def handle(self, *args, **options):

        root_dir = settings.FILEPATH
        git_snapshot = gitinfo.get_project_snapshot(root_dir, submodules=True)
        git_snapshot['diff_url'] = options.get('url', None)
        deploy = HqDeploy(
            date=datetime.utcnow(),
            user=options['user'],
            environment=options['environment'],
            code_snapshot=git_snapshot,
        )
        deploy.save()

        #  reset PillowTop errors in the hope that a fix has been deployed
        rows_updated = PillowError.bulk_reset_attempts(datetime.utcnow())
        if rows_updated:
            print "\n---------------- Pillow Errors Reset ----------------\n" \
                  "{} pillow errors queued for retry\n".format(rows_updated)

        deploy_notification_text = (
            "CommCareHQ has been successfully deployed to *{}* by *{}*. "
            "Find the diff {{diff_link}}".format(
                options['environment'],
                options['user'],
            )
        )
        if hasattr(settings, 'MIA_THE_DEPLOY_BOT_API'):
            link = diff_link(STYLE_SLACK, git_snapshot['diff_url'])
            requests.post(settings.MIA_THE_DEPLOY_BOT_API, data=json.dumps({
                "channel": "#dev",
                "username": "******",
                "text": deploy_notification_text.format(diff_link=link),
                "icon_emoji": ":see_no_evil:"
            }))

        if settings.DATADOG_API_KEY:
            tags = ['environment:{}'.format(options['environment'])]
            link = diff_link(STYLE_MARKDOWN, git_snapshot['diff_url'])
            datadog_api.Event.create(
                title="Deploy Success",
                text=deploy_notification_text.format(diff_link=link),
                tags=tags
            )

        if options['mail_admins']:
            snapshot_table = render_to_string('hqadmin/partials/project_snapshot.html', dictionary={'snapshot': git_snapshot})
            message = "Deployed by %s, cheers!" % options['user']
            snapshot_body = "<html><head><title>Deploy Snapshot</title></head><body><h2>%s</h2>%s</body></html>" % (message, snapshot_table)

            call_command('mail_admins', snapshot_body, **{'subject': 'Deploy successful', 'html': True})
Example No. 5
def system_info(request):
    environment = settings.SERVER_ENVIRONMENT

    context = get_hqadmin_base_context(request)
    context['couch_update'] = request.GET.get('couch_update', 5000)
    context['celery_update'] = request.GET.get('celery_update', 10000)
    context['celery_flower_url'] = getattr(settings, 'CELERY_FLOWER_URL', None)

    # recent changes
    recent_changes = int(request.GET.get('changes', 50))
    context['recent_changes'] = get_recent_changes(get_db(), recent_changes)
    context['rabbitmq_url'] = get_rabbitmq_management_url()
    context['hide_filters'] = True
    context['current_system'] = socket.gethostname()
    context['last_deploy'] = HqDeploy.get_latest(environment)
    context['snapshot'] = context['last_deploy'].code_snapshot if context['last_deploy'] else {}

    context.update(check_redis())
    context.update(check_rabbitmq())
    context.update(check_celery_health())
    context.update(check_memcached())
    context.update(check_es_cluster_health())

    return render(request, "hqadmin/system_info.html", context)
Example No. 6
    def page_context(self):
        environment = settings.SERVER_ENVIRONMENT

        context = get_hqadmin_base_context(self.request)
        context['couch_update'] = self.request.GET.get('couch_update', 5000)
        context['celery_update'] = self.request.GET.get('celery_update', 10000)
        context['db_update'] = self.request.GET.get('db_update', 30000)
        context['celery_flower_url'] = getattr(settings, 'CELERY_FLOWER_URL', None)

        context['is_bigcouch'] = is_bigcouch()
        context['rabbitmq_url'] = get_rabbitmq_management_url()
        context['hide_filters'] = True
        context['current_system'] = socket.gethostname()
        context['deploy_history'] = HqDeploy.get_latest(environment, limit=5)

        context['user_is_support'] = hasattr(self.request, 'user') and SUPPORT.enabled(self.request.user.username)

        context['redis'] = service_checks.check_redis()
        context['rabbitmq'] = service_checks.check_rabbitmq()
        context['celery_stats'] = get_celery_stats()
        context['heartbeat'] = service_checks.check_heartbeat()

        context['cluster_health'] = escheck.check_es_cluster_health()

        return context
Example No. 7
    def page_context(self):
        environment = settings.SERVER_ENVIRONMENT

        context = get_hqadmin_base_context(self.request)
        context['couch_update'] = self.request.GET.get('couch_update', 5000)
        context['celery_update'] = self.request.GET.get('celery_update', 10000)
        context['db_update'] = self.request.GET.get('db_update', 30000)
        context['celery_flower_url'] = getattr(settings, 'CELERY_FLOWER_URL', None)

        context['is_bigcouch'] = is_bigcouch()
        context['rabbitmq_url'] = get_rabbitmq_management_url()
        context['hide_filters'] = True
        context['current_system'] = socket.gethostname()
        context['deploy_history'] = HqDeploy.get_latest(environment, limit=5)

        context['user_is_support'] = hasattr(self.request, 'user') and SUPPORT.enabled(self.request.user.username)

        context['redis'] = service_checks.check_redis()
        context['rabbitmq'] = service_checks.check_rabbitmq()
        context['celery_stats'] = get_celery_stats()
        context['heartbeat'] = service_checks.check_heartbeat()

        context['cluster_health'] = escheck.check_es_cluster_health()

        return context
Example No. 8
    def handle(self, *args, **options):
        start = parser.parse(options['startdate'])
        enddate = options['enddate']
        end = parser.parse(enddate) if enddate else datetime.now()

        ds = HqDeploy.get_list('production', start, end)
        ids = [d['id'] for d in ds]
        sha_prev = None
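        # Note that sha_prev starts as None, so the first row printed below gets a
        # compare URL of the form .../compare/None...<sha>, which is only a placeholder.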
        print_row('Deploy Date', "Commit Date", "Diff")
        for id in ids:
            d = HqDeploy.get(id)
            s = d.code_snapshot['commits'][0]
            sha = s['sha']
            url = "https://github.com/dimagi/commcare-hq/compare/{sha_prev}...{sha}".format(
                sha=sha, sha_prev=sha_prev)
            print_row(d.date, s['date'], url)
            sha_prev = sha
Example No. 9
    def handle(self, *args, **options):

        root_dir = settings.FILEPATH
        git_snapshot = gitinfo.get_project_snapshot(root_dir, submodules=True)

        deploy = HqDeploy(
            date=datetime.utcnow(),
            user=options['user'],
            environment=options['environment'],
            code_snapshot=git_snapshot,
        )
        deploy.save()
        if options['mail_admins']:
            snapshot_table = render_to_string('hqadmin/partials/project_snapshot.html', dictionary={'snapshot': git_snapshot})
            message = "Deployed by %s, cheers!" % options['user']
            snapshot_body = "<html><head><title>Deploy Snapshot</title></head><body><h2>%s</h2>%s</body></html>" % (message, snapshot_table)

            call_command('mail_admins', snapshot_body, **{'subject': 'Deploy successful', 'html': True})
Example No. 10
def loadtest(request):
    # The multimech results api is kinda all over the place.
    # the docs are here: http://testutils.org/multi-mechanize/datastore.html

    scripts = ["submit_form.py", "ota_restore.py"]

    tests = []
    # datetime info seems to be buried in GlobalConfig.results[0].run_id,
    # which makes ORM-level sorting problematic
    for gc in Session.query(GlobalConfig).all()[::-1]:
        gc.scripts = dict((uc.script, uc) for uc in gc.user_group_configs)
        if gc.results:
            for script, uc in gc.scripts.items():
                uc.results = filter(lambda res: res.user_group_name == uc.user_group, gc.results)
            test = {"datetime": gc.results[0].run_id, "run_time": gc.run_time, "results": gc.results}
            for script in scripts:
                test[script.split(".")[0]] = gc.scripts.get(script)
            tests.append(test)

    context = get_hqadmin_base_context(request)
    context.update({"tests": tests, "hide_filters": True})

    date_axis = Axis(label="Date", dateFormat="%m/%d/%Y")
    tests_axis = Axis(label="Number of Tests in 30s")
    chart = LineChart("HQ Load Test Performance", date_axis, tests_axis)
    submit_data = []
    ota_data = []
    total_data = []
    max_val = 0
    max_date = None
    min_date = None
    for test in tests:
        date = test["datetime"]
        total = len(test["results"])
        max_val = total if total > max_val else max_val
        max_date = date if not max_date or date > max_date else max_date
        min_date = date if not min_date or date < min_date else min_date
        submit_data.append({"x": date, "y": len(test["submit_form"].results)})
        ota_data.append({"x": date, "y": len(test["ota_restore"].results)})
        total_data.append({"x": date, "y": total})

    deployments = [row["key"][1] for row in HqDeploy.get_list(settings.SERVER_ENVIRONMENT, min_date, max_date)]
    deploy_data = [{"x": min_date, "y": 0}]
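    # Each deployment is appended as a 0 -> max_val -> 0 spike at the same x value,
    # so it renders as a vertical line overlaid on the test-count series.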
    for date in deployments:
        deploy_data.extend([{"x": date, "y": 0}, {"x": date, "y": max_val}, {"x": date, "y": 0}])
    deploy_data.append({"x": max_date, "y": 0})

    chart.add_dataset("Deployments", deploy_data)
    chart.add_dataset("Form Submission Count", submit_data)
    chart.add_dataset("OTA Restore Count", ota_data)
    chart.add_dataset("Total Count", total_data)

    context["charts"] = [chart]

    template = "hqadmin/loadtest.html"
    return render(request, template, context)
Example No. 11
    def handle(self, *args, **options):
        start = parser.parse(options['startdate'])
        enddate = options['enddate']
        end = parser.parse(enddate) if enddate else datetime.utcnow()

        ds = HqDeploy.get_list('production', start, end)
        ids = [d['id'] for d in ds]
        sha_prev = None
        print_row('Deploy Date', "Commit Date", "Diff")
        for id in ids:
            d = HqDeploy.get(id)
            s = d.code_snapshot['commits'][0]
            sha = s['sha']
            url = "https://github.com/dimagi/commcare-hq/compare/{sha_prev}...{sha}".format(
                sha=sha,
                sha_prev=sha_prev
            )
            print_row(d.date, s['date'], url)
            sha_prev = sha
Example No. 12
def system_info(request):

    def human_bytes(bytes):
        #source: https://github.com/bartTC/django-memcache-status
        bytes = float(bytes)
        if bytes >= 1073741824:
            gigabytes = bytes / 1073741824
            size = '%.2fGB' % gigabytes
        elif bytes >= 1048576:
            megabytes = bytes / 1048576
            size = '%.2fMB' % megabytes
        elif bytes >= 1024:
            kilobytes = bytes / 1024
            size = '%.2fKB' % kilobytes
        else:
            size = '%.2fB' % bytes
        return size
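    # e.g. human_bytes(2048) -> '2.00KB', human_bytes(1073741824) -> '1.00GB'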

    context = get_hqadmin_base_context(request)
    context['couch_update'] = request.GET.get('couch_update', 5000)
    context['celery_update'] = request.GET.get('celery_update', 10000)
    context['celery_flower_url'] = getattr(settings, 'CELERY_FLOWER_URL', None)

    # recent changes
    recent_changes = int(request.GET.get('changes', 50))
    context['recent_changes'] = get_recent_changes(get_db(), recent_changes)


    context['rabbitmq_url'] = get_rabbitmq_management_url()

    context['hide_filters'] = True
    context['current_system'] = socket.gethostname()

    environment = settings.SERVER_ENVIRONMENT
    context['last_deploy'] = HqDeploy.get_latest(environment)

    context['snapshot'] = context['last_deploy'].code_snapshot if context['last_deploy'] else {}

    #redis status
    redis_status = ""
    redis_results = ""
    if 'redis' in settings.CACHES:
        rc = cache.get_cache('redis')
        try:
            import redis
            redis_api = redis.StrictRedis.from_url('redis://%s' % rc._server)
            info_dict = redis_api.info()
            redis_status = "Online"
            redis_results = "Used Memory: %s" % info_dict['used_memory_human']
        except Exception, ex:
            redis_status = "Offline"
            redis_results = "Redis connection error: %s" % ex
Example No. 13
    def handle(self, *args, **options):

        root_dir = settings.FILEPATH
        git_snapshot = gitinfo.get_project_snapshot(root_dir, submodules=True)

        deploy = HqDeploy(
            date=datetime.utcnow(),
            user=options['user'],
            environment=options['environment'],
            code_snapshot=git_snapshot,
        )
        deploy.save()
        if options['mail_admins']:
            snapshot_table = render_to_string(
                'hqadmin/partials/project_snapshot.html',
                dictionary={'snapshot': git_snapshot})
            message = "Deployed by %s, cheers!" % options['user']
            snapshot_body = "<html><head><title>Deploy Snapshot</title></head><body><h2>%s</h2>%s</body></html>" % (
                message, snapshot_table)

            call_command('mail_admins', snapshot_body, **{
                'subject': 'Deploy successful',
                'html': True
            })
Example No. 14
def system_info(request):
    environment = settings.SERVER_ENVIRONMENT

    context = get_hqadmin_base_context(request)
    context['couch_update'] = request.GET.get('couch_update', 5000)
    context['celery_update'] = request.GET.get('celery_update', 10000)
    context['db_update'] = request.GET.get('db_update', 30000)
    context['celery_flower_url'] = getattr(settings, 'CELERY_FLOWER_URL', None)

    context['is_bigcouch'] = is_bigcouch()
    context['rabbitmq_url'] = get_rabbitmq_management_url()
    context['hide_filters'] = True
    context['current_system'] = socket.gethostname()
    context['deploy_history'] = HqDeploy.get_latest(environment, limit=5)

    context.update(check_redis())
    context.update(check_rabbitmq())
    context.update(check_celery_health())
    context.update(check_es_cluster_health())

    return render(request, "hqadmin/system_info.html", context)
Example No. 15
def system_info(request):
    environment = settings.SERVER_ENVIRONMENT

    context = get_hqadmin_base_context(request)
    context['couch_update'] = request.GET.get('couch_update', 5000)
    context['celery_update'] = request.GET.get('celery_update', 10000)
    context['celery_flower_url'] = getattr(settings, 'CELERY_FLOWER_URL', None)

    # recent changes
    recent_changes = int(request.GET.get('changes', 50))
    context['recent_changes'] = get_recent_changes(get_db(), recent_changes)
    context['rabbitmq_url'] = get_rabbitmq_management_url()
    context['hide_filters'] = True
    context['current_system'] = socket.gethostname()
    context['last_deploy'] = HqDeploy.get_latest(environment)
    context['snapshot'] = context['last_deploy'].code_snapshot if context['last_deploy'] else {}

    context.update(check_redis())
    context.update(check_rabbitmq())
    context.update(check_celery_health())
    context.update(check_memcached())
    context.update(check_es_cluster_health())

    return render(request, "hqadmin/system_info.html", context)
Example No. 16
    def handle(self, **options):
        compare_url = options.get('url', None)
        minutes = options.get('minutes', None)

        deploy = HqDeploy(date=datetime.utcnow(),
                          user=options['user'],
                          environment=options['environment'],
                          diff_url=compare_url)
        deploy.save()

        #  reset PillowTop errors in the hope that a fix has been deployed
        rows_updated = PillowError.bulk_reset_attempts(datetime.utcnow())
        if rows_updated:
            print("\n---------------- Pillow Errors Reset ----------------\n" \
                  "{} pillow errors queued for retry\n".format(rows_updated))

        deploy_notification_text = (
            "CommCareHQ has been successfully deployed to *{}* by *{}* in *{}* minutes. "
            .format(
                options['environment'],
                options['user'],
                minutes or '?',
            ))

        if options['environment'] == 'production':
            deploy_notification_text += "Monitor the {dashboard_link}. "

        if settings.MOBILE_INTEGRATION_TEST_TOKEN:
            deploy_notification_text += "Check the integration {integration_tests_link}. "
            requests.get(
                'https://jenkins.dimagi.com/job/integration-tests/build',
                params={'token': settings.MOBILE_INTEGRATION_TEST_TOKEN},
            )
            requests.get(
                'https://jenkins.dimagi.com/job/integration-tests-pipeline/build',
                params={'token': settings.MOBILE_INTEGRATION_TEST_TOKEN},
            )

        deploy_notification_text += "Find the diff {diff_link}"

        if hasattr(settings, 'MIA_THE_DEPLOY_BOT_API'):
            link = diff_link(STYLE_SLACK, compare_url)
            if options['environment'] == 'staging':
                channel = '#staging'
            elif options['environment'] == 'icds':
                channel = '#nic-server-standup'
            else:
                channel = '#hq-ops'
            requests.post(settings.MIA_THE_DEPLOY_BOT_API, data=json.dumps({
                "username": "******",
                "channel": channel,
                "text": deploy_notification_text.format(
                    dashboard_link=dashboard_link(STYLE_SLACK, DASHBOARD_URL),
                    diff_link=link,
                    integration_tests_link=integration_tests_link(STYLE_SLACK, INTEGRATION_TEST_URL)
                ),
            }))

        if settings.DATADOG_API_KEY:
            tags = ['environment:{}'.format(options['environment'])]
            link = diff_link(STYLE_MARKDOWN, compare_url)
            datadog_api.Event.create(
                title="Deploy Success",
                text=deploy_notification_text.format(
                    dashboard_link=dashboard_link(STYLE_MARKDOWN,
                                                  DASHBOARD_URL),
                    diff_link=link,
                    integration_tests_link=integration_tests_link(
                        STYLE_MARKDOWN, INTEGRATION_TEST_URL)),
                tags=tags,
                alert_type="success")

            print("\n=============================================================\n" \
                  "Congratulations! Deploy Complete.\n\n" \
                  "Don't forget to keep an eye on the deploy dashboard to " \
                  "make sure everything is running smoothly.\n\n" \
                  "https://p.datadoghq.com/sb/5c4af2ac8-1f739e93ef" \
                  "\n=============================================================\n")

        if options['mail_admins']:
            message_body = get_deploy_email_message_body(
                user=options['user'], compare_url=compare_url)
            call_command('mail_admins', message_body, **{
                'subject': 'Deploy successful',
                'html': True
            })
            if settings.DAILY_DEPLOY_EMAIL:
                recipient = settings.DAILY_DEPLOY_EMAIL
                subject = 'Deploy Successful - {}'.format(
                    options['environment'])
                send_HTML_email(subject=subject,
                                recipient=recipient,
                                html_content=message_body)

        if settings.SENTRY_CONFIGURED and settings.SENTRY_API_KEY:
            create_update_sentry_release()
            notify_sentry_deploy(minutes)
Example No. 17
        if len(mc_stats) > 0:
            mc_status = "Online"
            stats_dict = mc_stats[0][1]
            bytes = stats_dict['bytes']
            max_bytes = stats_dict['limit_maxbytes']
            curr_items = stats_dict['curr_items']
            mc_results = "%s Items %s out of %s" % (curr_items, human_bytes(bytes),
                                                    human_bytes(max_bytes))

    except Exception, ex:
        mc_status = "Offline"
        mc_results = "%s" % ex
    context['memcached_status'] = mc_status
    context['memcached_results'] = mc_results

    context['last_deploy'] = HqDeploy.get_latest()

    #elasticsearch status
    #node status
    context.update(check_cluster_health())
    context.update(check_case_index())
    context.update(check_xform_index())
    context.update(check_exchange_index())

    return render(request, "hqadmin/system_info.html", context)

@require_superuser
def noneulized_users(request, template="hqadmin/noneulized_users.html"):
    context = get_hqadmin_base_context(request)

    days = request.GET.get("days", None)
Example No. 18
        mc_stats = mc._cache.get_stats()
        if len(mc_stats) > 0:
            mc_status = "Online"
            stats_dict = mc_stats[0][1]
            bytes = stats_dict["bytes"]
            max_bytes = stats_dict["limit_maxbytes"]
            curr_items = stats_dict["curr_items"]
            mc_results = "%s Items %s out of %s" % (curr_items, human_bytes(bytes), human_bytes(max_bytes))

    except Exception, ex:
        mc_status = "Offline"
        mc_results = "%s" % ex
    context["memcached_status"] = mc_status
    context["memcached_results"] = mc_results

    context["last_deploy"] = HqDeploy.get_latest()

    # elasticsearch status
    # node status
    context.update(check_cluster_health())
    context.update(check_case_index())
    context.update(check_xform_index())
    context.update(check_exchange_index())

    return render(request, "hqadmin/system_info.html", context)


@require_superuser
def noneulized_users(request, template="hqadmin/noneulized_users.html"):
    context = get_hqadmin_base_context(request)
Example No. 19
    def handle(self, *args, **options):
        deploy = HqDeploy(date=datetime.utcnow(), user=options['user'])
        deploy.save()
Example No. 20
    def handle(self, *args, **options):
        compare_url = options.get('url', None)
        minutes = options.get('minutes', None)

        deploy = HqDeploy(date=datetime.utcnow(),
                          user=options['user'],
                          environment=options['environment'],
                          diff_url=compare_url)
        deploy.save()

        #  reset PillowTop errors in the hope that a fix has been deployed
        rows_updated = PillowError.bulk_reset_attempts(datetime.utcnow())
        if rows_updated:
            print "\n---------------- Pillow Errors Reset ----------------\n" \
                  "{} pillow errors queued for retry\n".format(rows_updated)

        deploy_notification_text = (
            "CommCareHQ has been successfully deployed to *{}* by *{}* in *{}* minutes. "
            "Monitor the {{dashboard_link}}. "
            "Check the integration {{integration_tests_link}}. "
            "Find the diff {{diff_link}}".format(
                options['environment'],
                options['user'],
                minutes or '?',
            ))

        if settings.MOBILE_INTEGRATION_TEST_TOKEN:
            requests.get(
                'https://jenkins.dimagi.com/job/integration-tests/build',
                params={'token': settings.MOBILE_INTEGRATION_TEST_TOKEN},
            )

        if hasattr(settings, 'MIA_THE_DEPLOY_BOT_API'):
            link = diff_link(STYLE_SLACK, compare_url)
            requests.post(settings.MIA_THE_DEPLOY_BOT_API, data=json.dumps({
                "username": "******",
                "text": deploy_notification_text.format(
                    dashboard_link=dashboard_link(STYLE_SLACK, DASHBOARD_URL),
                    diff_link=link,
                    integration_tests_link=integration_tests_link(STYLE_SLACK, INTEGRATION_TEST_URL)
                ),
            }))

        if settings.DATADOG_API_KEY:
            tags = ['environment:{}'.format(options['environment'])]
            link = diff_link(STYLE_MARKDOWN, compare_url)
            datadog_api.Event.create(
                title="Deploy Success",
                text=deploy_notification_text.format(
                    dashboard_link=dashboard_link(STYLE_MARKDOWN,
                                                  DASHBOARD_URL),
                    diff_link=link,
                    integration_tests_link=integration_tests_link(
                        STYLE_MARKDOWN, INTEGRATION_TEST_URL)),
                tags=tags,
                alert_type="success")

            print "\n=============================================================\n" \
                  "Congratulations! Deploy Complete.\n\n" \
                  "Don't forget to keep an eye on the deploy dashboard to " \
                  "make sure everything is running smoothly.\n\n" \
                  "https://p.datadoghq.com/sb/5c4af2ac8-1f739e93ef" \
                  "\n=============================================================\n"

        if options['mail_admins']:
            message_body = get_deploy_email_message_body(
                environment=options['environment'],
                user=options['user'],
                compare_url=compare_url)
            call_command('mail_admins', message_body, **{
                'subject': 'Deploy successful',
                'html': True
            })
Example No. 21
    def handle(self, **options):
        compare_url = options.get('url', None)
        minutes = options.get('minutes', None)

        deploy = HqDeploy(
            date=datetime.utcnow(),
            user=options['user'],
            environment=options['environment'],
            diff_url=compare_url,
            commit=options['commit']
        )
        deploy.save()

        #  reset PillowTop errors in the hope that a fix has been deployed
        rows_updated = PillowError.bulk_reset_attempts(datetime.utcnow())
        if rows_updated:
            print("\n---------------- Pillow Errors Reset ----------------\n" \
                  "{} pillow errors queued for retry\n".format(rows_updated))

        deploy_notification_text = (
            "CommCareHQ has been successfully deployed to *{}* by *{}* in *{}* minutes. ".format(
                options['environment'],
                options['user'],
                minutes or '?',
            )
        )

        if options['environment'] == 'production':
            deploy_notification_text += "Monitor the {dashboard_link}. "

        if settings.MOBILE_INTEGRATION_TEST_TOKEN:
            deploy_notification_text += "Check the integration {integration_tests_link}. "
            requests.get(
                'https://jenkins.dimagi.com/job/integration-tests/build',
                params={'token': settings.MOBILE_INTEGRATION_TEST_TOKEN},
            )
            requests.get(
                'https://jenkins.dimagi.com/job/integration-tests-pipeline/build',
                params={'token': settings.MOBILE_INTEGRATION_TEST_TOKEN},
            )

        deploy_notification_text += "Find the diff {diff_link}"

        if settings.DATADOG_API_KEY:
            link = diff_link(compare_url)
            create_metrics_event(
                title="Deploy Success",
                text=deploy_notification_text.format(
                    dashboard_link=dashboard_link(DASHBOARD_URL),
                    diff_link=link,
                    integration_tests_link=integration_tests_link(INTEGRATION_TEST_URL)
                ),
                tags={'environment': options['environment']},
                alert_type="success"
            )

            print(
                "\n=============================================================\n"
                "Congratulations! Deploy Complete.\n\n"
                "Don't forget to keep an eye on the deploy dashboard to "
                "make sure everything is running smoothly.\n\n"
                "https://app.datadoghq.com/dashboard/xch-zwt-vzv/hq-deploy-dashboard?tpl_var_environment={}"
                "\n=============================================================\n".format(
                    settings.SERVER_ENVIRONMENT
                )
            )

        if options['mail_admins']:
            message_body = get_deploy_email_message_body(user=options['user'], compare_url=compare_url)
            subject = 'Deploy Successful - {}'.format(options['environment'])
            call_command('mail_admins', message_body, **{'subject': subject, 'html': True})
            if settings.DAILY_DEPLOY_EMAIL:
                recipient = settings.DAILY_DEPLOY_EMAIL

                send_HTML_email(subject=subject,
                                recipient=recipient,
                                html_content=message_body)

        if settings.SENTRY_CONFIGURED and settings.SENTRY_API_KEY:
            create_update_sentry_release()
            notify_sentry_deploy(minutes)
Example No. 22
def loadtest(request):
    # The multimech results api is kinda all over the place.
    # the docs are here: http://testutils.org/multi-mechanize/datastore.html

    db_settings = settings.DATABASES["default"]
    db_settings['PORT'] = db_settings.get('PORT', '') or '5432'
    db_url = "postgresql://{USER}:{PASSWORD}@{HOST}:{PORT}/{NAME}".format(
        **db_settings)
    engine = create_engine(db_url)
    session = sessionmaker(bind=engine)
    current = session()

    scripts = ['submit_form.py', 'ota_restore.py']

    tests = []
    # datetime info seems to be buried in GlobalConfig.results[0].run_id,
    # which makes ORM-level sorting problematic
    for gc in current.query(GlobalConfig).all()[::-1]:
        gc.scripts = dict((uc.script, uc) for uc in gc.user_group_configs)
        if gc.results:
            for script, uc in gc.scripts.items():
                uc.results = filter(
                    lambda res: res.user_group_name == uc.user_group,
                    gc.results)
            test = {
                'datetime': gc.results[0].run_id,
                'run_time': gc.run_time,
                'results': gc.results,
            }
            for script in scripts:
                test[script.split('.')[0]] = gc.scripts.get(script)
            tests.append(test)

    context = get_hqadmin_base_context(request)
    context.update({
        "tests": tests,
        "hide_filters": True,
    })

    date_axis = Axis(label="Date", dateFormat="%m/%d/%Y")
    tests_axis = Axis(label="Number of Tests in 30s")
    chart = LineChart("HQ Load Test Performance", date_axis, tests_axis)
    submit_data = []
    ota_data = []
    total_data = []
    max_val = 0
    max_date = None
    min_date = None
    for test in tests:
        date = test['datetime']
        total = len(test['results'])
        max_val = total if total > max_val else max_val
        max_date = date if not max_date or date > max_date else max_date
        min_date = date if not min_date or date < min_date else min_date
        submit_data.append({'x': date, 'y': len(test['submit_form'].results)})
        ota_data.append({'x': date, 'y': len(test['ota_restore'].results)})
        total_data.append({'x': date, 'y': total})

    deployments = [
        row['key'][1] for row in HqDeploy.get_list(settings.SERVER_ENVIRONMENT,
                                                   min_date, max_date)
    ]
    deploy_data = [{'x': min_date, 'y': 0}]
    for date in deployments:
        deploy_data.extend([{'x': date, 'y': 0}, {'x': date, 'y': max_val}, {'x': date, 'y': 0}])
    deploy_data.append({'x': max_date, 'y': 0})

    chart.add_dataset("Deployments", deploy_data)
    chart.add_dataset("Form Submission Count", submit_data)
    chart.add_dataset("OTA Restore Count", ota_data)
    chart.add_dataset("Total Count", total_data)

    context['charts'] = [chart]

    template = "hqadmin/loadtest.html"
    return render(request, template, context)
Example No. 23
def loadtest(request):
    # The multimech results api is kinda all over the place.
    # the docs are here: http://testutils.org/multi-mechanize/datastore.html

    db_settings = settings.DATABASES["default"]
    db_settings['PORT'] = db_settings.get('PORT', '') or '5432'
    db_url = "postgresql://{USER}:{PASSWORD}@{HOST}:{PORT}/{NAME}".format(
        **db_settings
    )
    engine = create_engine(db_url)
    session = sessionmaker(bind=engine)
    current = session()

    scripts = ['submit_form.py', 'ota_restore.py']

    tests = []
    # datetime info seems to be buried in GlobalConfig.results[0].run_id,
    # which makes ORM-level sorting problematic
    for gc in current.query(GlobalConfig).all()[::-1]:
        gc.scripts = dict((uc.script, uc) for uc in gc.user_group_configs)
        if gc.results:
            for script, uc in gc.scripts.items():
                uc.results = filter(
                    lambda res: res.user_group_name == uc.user_group,
                    gc.results
                )
            test = {
                'datetime': gc.results[0].run_id,
                'run_time': gc.run_time,
                'results': gc.results,
            }
            for script in scripts:
                test[script.split('.')[0]] = gc.scripts.get(script)
            tests.append(test)

    context = get_hqadmin_base_context(request)
    context.update({
        "tests": tests,
        "hide_filters": True,
    })

    date_axis = Axis(label="Date", dateFormat="%m/%d/%Y")
    tests_axis = Axis(label="Number of Tests in 30s")
    chart = LineChart("HQ Load Test Performance", date_axis, tests_axis)
    submit_data = []
    ota_data = []
    total_data = []
    max_val = 0
    max_date = None
    min_date = None
    for test in tests:
        date = test['datetime']
        total = len(test['results'])
        max_val = total if total > max_val else max_val
        max_date = date if not max_date or date > max_date else max_date
        min_date = date if not min_date or date < min_date else min_date
        submit_data.append({'x': date, 'y': len(test['submit_form'].results)})
        ota_data.append({'x': date, 'y': len(test['ota_restore'].results)})
        total_data.append({'x': date, 'y': total})

    deployments = [row['key'][1] for row in HqDeploy.get_list(settings.SERVER_ENVIRONMENT, min_date, max_date)]
    deploy_data = [{'x': min_date, 'y': 0}]
    for date in deployments:
        deploy_data.extend([{'x': date, 'y': 0}, {'x': date, 'y': max_val}, {'x': date, 'y': 0}])
    deploy_data.append({'x': max_date, 'y': 0})

    chart.add_dataset("Deployments", deploy_data)
    chart.add_dataset("Form Submission Count", submit_data)
    chart.add_dataset("OTA Restore Count", ota_data)
    chart.add_dataset("Total Count", total_data)

    context['charts'] = [chart]

    template = "hqadmin/loadtest.html"
    return render(request, template, context)
Example No. 24
        if len(mc_stats) > 0:
            mc_status = "Online"
            stats_dict = mc_stats[0][1]
            bytes = stats_dict['bytes']
            max_bytes = stats_dict['limit_maxbytes']
            curr_items = stats_dict['curr_items']
            mc_results = "%s Items %s out of %s" % (curr_items, human_bytes(bytes),
                                                    human_bytes(max_bytes))

    except Exception, ex:
        mc_status = "Offline"
        mc_results = "%s" % ex
    context['memcached_status'] = mc_status
    context['memcached_results'] = mc_results

    context['last_deploy'] = HqDeploy.get_latest()

    #elasticsearch status
    #node status
    context.update(check_cluster_health())
    context.update(check_case_index())
    context.update(check_xform_index())
    context.update(check_exchange_index())

    return render(request, "hqadmin/system_info.html", context)

@require_superuser
def noneulized_users(request, template="hqadmin/noneulized_users.html"):
    context = get_hqadmin_base_context(request)

    days = request.GET.get("days", None)
Example No. 25
    def handle(self, **options):
        compare_url = options.get('url', None)
        minutes = options.get('minutes', None)

        deploy = HqDeploy(
            date=datetime.utcnow(),
            user=options['user'],
            environment=options['environment'],
            diff_url=compare_url
        )
        deploy.save()

        #  reset PillowTop errors in the hope that a fix has been deployed
        rows_updated = PillowError.bulk_reset_attempts(datetime.utcnow())
        if rows_updated:
            print("\n---------------- Pillow Errors Reset ----------------\n" \
                  "{} pillow errors queued for retry\n".format(rows_updated))

        deploy_notification_text = (
            "CommCareHQ has been successfully deployed to *{}* by *{}* in *{}* minutes. ".format(
                options['environment'],
                options['user'],
                minutes or '?',
            )
        )

        if options['environment'] == 'production':
            deploy_notification_text += "Monitor the {dashboard_link}. "

        if settings.MOBILE_INTEGRATION_TEST_TOKEN:
            deploy_notification_text += "Check the integration {integration_tests_link}. "
            requests.get(
                'https://jenkins.dimagi.com/job/integration-tests/build',
                params={'token': settings.MOBILE_INTEGRATION_TEST_TOKEN},
            )
            requests.get(
                'https://jenkins.dimagi.com/job/integration-tests-pipeline/build',
                params={'token': settings.MOBILE_INTEGRATION_TEST_TOKEN},
            )

        deploy_notification_text += "Find the diff {diff_link}"

        if hasattr(settings, 'MIA_THE_DEPLOY_BOT_API'):
            link = diff_link(STYLE_SLACK, compare_url)
            if options['environment'] == 'staging':
                channel = '#staging'
            elif options['environment'] == 'icds':
                channel = '#nic-server-standup'
            else:
                channel = '#hq-ops'
            requests.post(settings.MIA_THE_DEPLOY_BOT_API, data=json.dumps({
                "username": "******",
                "channel": channel,
                "text": deploy_notification_text.format(
                    dashboard_link=dashboard_link(STYLE_SLACK, DASHBOARD_URL),
                    diff_link=link,
                    integration_tests_link=integration_tests_link(STYLE_SLACK, INTEGRATION_TEST_URL)
                ),
            }))

        if settings.DATADOG_API_KEY:
            tags = ['environment:{}'.format(options['environment'])]
            link = diff_link(STYLE_MARKDOWN, compare_url)
            datadog_api.Event.create(
                title="Deploy Success",
                text=deploy_notification_text.format(
                    dashboard_link=dashboard_link(STYLE_MARKDOWN, DASHBOARD_URL),
                    diff_link=link,
                    integration_tests_link=integration_tests_link(STYLE_MARKDOWN, INTEGRATION_TEST_URL)
                ),
                tags=tags,
                alert_type="success"
            )

            print("\n=============================================================\n" \
                  "Congratulations! Deploy Complete.\n\n" \
                  "Don't forget to keep an eye on the deploy dashboard to " \
                  "make sure everything is running smoothly.\n\n" \
                  "https://p.datadoghq.com/sb/5c4af2ac8-1f739e93ef" \
                  "\n=============================================================\n")

        if options['mail_admins']:
            message_body = get_deploy_email_message_body(user=options['user'], compare_url=compare_url)
            call_command('mail_admins', message_body, **{'subject': 'Deploy successful', 'html': True})
            if settings.DAILY_DEPLOY_EMAIL:
                recipient = settings.DAILY_DEPLOY_EMAIL
                subject = 'Deploy Successful - {}'.format(options['environment'])
                send_HTML_email(subject=subject,
                                recipient=recipient,
                                html_content=message_body)

        if settings.SENTRY_CONFIGURED and settings.SENTRY_API_KEY:
            create_update_sentry_release()
            notify_sentry_deploy(minutes)