Example #1
def get_all_deploys(request):
    env_stage_a = environs_helper.get_env_by_stage(request, NGAPP_A, "prod")
    env_stage_b = environs_helper.get_env_by_stage(request, NGAPP_B, "prod")
    index = int(request.GET.get('page_index', '1'))
    size = int(request.GET.get('page_size', '%d' % DEFAULT_PAGE_SIZE))
    filters = {
        'envId': [env_stage_a['id'], env_stage_b['id']],
        'pageIndex': index,
        'pageSize': size,
    }
    result = deploys_helper.get_all(request, **filters)
    deploy_summaries = []
    for deploy in result['deploys']:
        build = builds_helper.get_build(request, deploy['buildId'])
        deploy_summaries.append({'deploy': deploy, 'build': build})

    # Get the rollback history from S3 between the start_timestamp and
    # end_timestamp of the current page, assuming deploy_summaries is in
    # descending order. If this is the first page, set end_timestamp to now.
    if deploy_summaries:
        start_timestamp = deploy_summaries[-1]['deploy']['startDate']
        if index == 1:
            end_timestamp = time.time() * 1000
        else:
            end_timestamp = deploy_summaries[0]['deploy']['startDate']
    else:
        start_timestamp = 0
        end_timestamp = time.time() * 1000

    s3 = s3_helper.S3Helper(bucket_name=S3_INTERNAL_TOOLS_BUCKET_NAME)
    history_key = get_rollback_history_key()
    rollbacks = s3.list(history_key)
    for rollback in rollbacks:
        # Keys are "<history_key>/<epoch-millis>"; strip the prefix and the "/" to get the timestamp.
        timestamp = float(rollback.name[len(history_key) + 1:])
        if start_timestamp <= timestamp <= end_timestamp:
            summary = json.loads(s3.download_string(rollback.name))
            deploy_summaries.append(summary)

    # Order the history by deploy startDate, descending.
    deploy_summaries.sort(key=lambda summary: summary['deploy']['startDate'],
                          reverse=True)

    return render(
        request, 'ngapp2/ngapp2_history.html', {
            "deploy_summaries": deploy_summaries,
            "pageIndex": index,
            "pageSize": DEFAULT_PAGE_SIZE,
            "disablePrevious": index <= 1,
            "disableNext": len(result['deploys']) < DEFAULT_PAGE_SIZE,
        })
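For context, the listing above assumes each rollback-history object in S3 is keyed as "<history_key>/<epoch-millis>", which matches how start_roll_back in Example #2 writes them. A minimal sketch of that key round-trip, using a hypothetical prefix:

    history_key = "ngapp2/rollback_history"        # hypothetical prefix
    key = "%s/%d" % (history_key, 1500000000000)   # -> "ngapp2/rollback_history/1500000000000"
    timestamp = float(key[len(history_key) + 1:])  # -> 1500000000000.0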
Example #2
    def start_roll_back(self, request):
        Recorder(settings.NGAPP_ROLLBACK_STATUS_NODE).init_info()

        params = request.POST
        reason = params.get("description")
        build_sha = params.get("build")
        operator = request.teletraan_user_id.name

        if is_prod():
            virtual_env = os.path.join(os.environ.get("VIRTUAL_ENV"), "bin")
            cmd = [
                os.path.join(virtual_env, "python"),
                os.path.join(virtual_env, "ngapp-rollback")
            ]
        else:
            cmd = [
                os.path.join(os.environ.get("BASE_DIR"),
                             "../integ_test/ngapp2/rollback")
            ]

        result = execute(cmd)
        if result < 0:
            raise Exception("Rollback command %s failed with exit code %d" % (cmd, result))

        # record history
        timestamp = int(time.time() * 1000)  # epoch millis; used in the record and as the S3 key suffix
        deploy = {
            "startDate": timestamp,
            "type": "ROLLBACK",
            "state": "SUCCEEDED",
            "acceptanceStatus": "ACCEPTED",
            "operator": operator,
            "successTotal": 1,
            "total": 1,
            "reason": reason
        }
        build = {"commitShort": build_sha}
        history = {"deploy": deploy, "build": build}

        s3 = s3_helper.S3Helper(bucket_name=S3_INTERNAL_TOOLS_BUCKET_NAME)
        history_key = get_rollback_history_key()
        s3.upload_string("%s/%d" % (history_key, timestamp),
                         json.dumps(history))
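The record written above mirrors the {"deploy": ..., "build": ...} summary shape that get_all_deploys in Example #1 builds for regular deploys, so rollbacks recorded out of band are merged into deploy_summaries and rendered alongside them.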
Example #3
def s3_to_db_comparison():
    s3_obj = s3.S3Helper()
    db_obj = db.DbHelper()
    bucket = ''
    file_name = ''
    database = ''
    host = ''
    port = ''
    user = ''
    password = ''
    db_name = ''
    table_name = ''
    sql = ''
    with open("comparison.json") as f:
        data = json.load(f)
    for obj in data:
        if obj == "s3":
            # S3 side of the comparison: which object to read into a dataframe.
            bucket = data[obj]['data']['bucket']
            file_name = data[obj]['data']['filename']
        elif obj == 'sqlite':
            # SQLite connects through a file path instead of host/port credentials.
            sql = data[obj]['connection']['path']
            database = obj
        else:
            # Any other key names a networked database with full connection details.
            database = obj
            user = data[obj]['connection']['user']
            host = data[obj]['connection']['host']
            port = data[obj]['connection']['port']
            password = data[obj]['connection']['password']
            db_name = data[obj]['connection']['database']
            table_name = data[obj]['data']['table']

    conn = db_obj.generate_connection_string(database, user, password, host,
                                             port, db_name, sql)
    db_obj.create_conn(conn)
    query = db_obj.query_builder('query.json', 'query1')
    df1 = db_obj.query_execution(query)
    df2 = s3_obj.read_s3_file(bucket, file_name)
    result = comparison.compare_dataframes(df1, df2)
    if result:
        print("The two data sets match.")
    else:
        print("The two data sets do not match.")