Пример #1
0
def build_diff(args):
    """
    Worker: compute and normalize the per-time-slot diff feature of one sensor.

    args: tuple (k, time_slots, conf_lev, set_val, set_name, num_type).
    Returns (k, [output_status, norm_diff_mean]) where output_status is 0 on
    success and -1 when the feature must be excluded from analysis.
    """
    (k, time_slots, conf_lev, set_val, set_name, num_type) = args

    log.info(set_name)
    try:
        diff_mean = get_diff(set_val, time_slots, num_type, conf_lev)
        if num_type == FLOAT_TYPE:
            norm_diff_mean, output_status = normalize_data(diff_mean)

        elif num_type == INT_TYPE:
            num_discrete_vals = len(set(list(diff_mean)))
            log.info('num_discrete_vals :' + str(num_discrete_vals))
            if num_discrete_vals > 1:
                output_status = 0
                norm_diff_mean = diff_mean
            else:
                # A single discrete value carries no information -> exclude.
                output_status = -1
                norm_diff_mean = list(set(diff_mean))
        else:
            # Bug fix: the original `else: pass` left output_status and
            # norm_diff_mean unbound, raising NameError at the final return.
            log.error('Unknown num_type, feature excluded from analysis')
            output_status = -1
            norm_diff_mean = None

    except Exception as e:
        # Bug fix: traceback.print_exc() prints and returns None, so the
        # original logged the literal string 'None'; format_exc() returns
        # the traceback text.
        log.error(traceback.format_exc())
        log.error('Error in processing data feature, excluded from analysis ' + str(e))
        output_status = -1
        norm_diff_mean = None
        return (k, [output_status, norm_diff_mean])

    return (k, [output_status, norm_diff_mean])
Пример #2
0
def upload_compact(request):
    """Handle upload of a contract template file (POST, multipart)."""
    if request.method == 'POST':
        uploaded = request.FILES.get('compact', None)
        if uploaded:
            target = BASE_DIR + '/static/media/compact/' + uploaded.name
            try:
                # Stream the upload to disk chunk by chunk.
                with open(target, 'wb+') as out:
                    for piece in uploaded.chunks():
                        out.write(piece)
                status, msg = 200, '上传成功!'
            except Exception as e:
                log.error('合同模板上传失败:' + str(e))
                status, msg = 400, '上传失败!'
        else:
            log.error('合同模板上传失败:没有上传文件')
            status, msg = 400, '请先选择要上传的文件!'
        return JsonResponse({'status': status, 'msg': msg})
Пример #3
0
def rotate_pic(request):
    """Rotate a stored picture by 270 degrees, overwriting it in place."""
    pic_url = request.GET.get('pic_url', '')
    if not pic_url or len(pic_url) == 0:
        log.error('旋转图片失败:无效的文件名')
        status, msg = 400, '操作失败!'
    else:
        try:
            # Only the final path component names the stored file.
            name = pic_url.split('/')[-1]
            path = os.path.join(BASE_DIR, 'static/media/pic/') + name
            image = Image.open(path)
            image.transpose(Image.ROTATE_270).save(path)
            status, msg = 200, '操作成功!'
        except Exception as e:
            log.error('旋转图片失败:' + str(e))
            status, msg = 400, '操作失败!'
    return JsonResponse({'status': status, 'msg': msg})
Пример #4
0
def register(request):
    """Membership application: create a Members row keyed by phone number."""
    if request.method == 'POST':
        name = request.POST.get('name', '')
        phone = request.POST.get('phone', '')
        if not name or not phone or len(name) == 0 or len(phone) == 0:
            log.error('申请会员失败:名字或电话号码不能为空')
            status, msg = 400, '用户名或手机号不能为空'
        elif not re.match(r"^1[3456789]\d{9}$", phone):
            # Mainland-China mobile number format check.
            status, msg = 400, '手机号格式错误'
        else:
            try:
                if Members.objects.filter(phone=phone).exists():
                    return JsonResponse({
                        'status': 400,
                        'msg': '该手机号已绑定会员账号,请更换手机号或直接登录!'
                    })
                # Initial password is the last 6 digits of the phone number.
                Members.objects.create(name=name,
                                       phone=phone,
                                       auth_str=(make_pwd(phone[-6:])))
                status, msg = 200, '申请成功,您的初始密码为手机号后6位'
            except Exception as e:
                log.error('申请会员失败:' + str(e))
                status, msg = 400, '申请失败!'
        return JsonResponse({'status': status, 'msg': msg})
Пример #5
0
def get_mem_materials(request):
    """Return the URLs of the pictures a member has uploaded (GET: id)."""
    mem_id = request.GET.get('id', '')
    if not mem_id or len(mem_id) == 0:
        log.error('获取用户上传的图片失败:无效的用户id')
        status, msg = 400, '获取用户上传图片失败!'
    else:
        try:
            records = MemberMaterials.objects.filter(mem_id=mem_id)
            base = os.path.join(BASE_URL, 's/media/pic/')
            pic_list = [base + rec.materials_name for rec in records]
            return JsonResponse({'status': 200, 'pic_list': pic_list})
        except Exception as e:
            log.error('获取用户上传的图片失败:' + str(e))
            status, msg = 400, '获取用户上传图片失败!'
    return JsonResponse({'status': status, 'msg': msg})
Пример #6
0
def edit_addr(request):
    """
    Edit an area-address record (POST: id, area, addr).

    Returns a JsonResponse with status/msg.  Bug fix: removed a leftover
    debug print() that dumped the raw parameters to stdout on every call.
    """
    if request.method == 'POST':
        addr_id = request.POST.get('id', '')
        area = request.POST.get('area', '')
        addr = request.POST.get('addr', '')
        # Reject missing/empty parameters up front.
        if any((not addr_id, not area, not addr, len(addr_id) == 0,
                len(area) == 0, len(addr) == 0)):
            msg = '区域或详细地址不能为空!'
            status = 400
        else:
            try:
                area_addr = AreaAddr.objects.get(pk=addr_id)
                area_addr.area, area_addr.addr = area, addr
                area_addr.save()
                msg = '地址修改成功!'
                status = 200
            except Exception as e:
                log.error('地址修改失败:' + str(e))
                msg = '地址修改失败!'
                status = 400
        return JsonResponse({'status': status, 'msg': msg})
Пример #7
0
def get_diff(set_val,time_slots,num_type,conf_lev):
    """Per-time-slot diff feature for one sensor stream.

    set_val: pair of parallel arrays — set_val[0] sample times (unix
        seconds) and set_val[1] sample values (assumed from the indexing
        below — TODO confirm against callers).
    time_slots: datetime slot starts; the slot width is taken from the
        first two entries, so slots are assumed uniformly spaced.
    num_type: FLOAT_TYPE -> mean of the retained diffs; INT_TYPE -> mode.
    conf_lev: fraction in [0,1); only the top (1-conf_lev) quantile of
        absolute successive differences within a slot is summarized.
    Returns a 1-D np.array, with np.inf marking slots that had fewer
    than two samples (or no retained diffs).
    Raises NameError for an unknown num_type.
    """
    time_slots_utc = dtime_to_unix(time_slots)
    # Slot width in whole seconds, from the first pair of slots.
    TIMELET_INV_seconds = (time_slots[1]-time_slots[0]).seconds
    diff_mean = list()

    for r, utc_t in enumerate(time_slots_utc):
        utc_t_s = utc_t
        utc_t_e = utc_t + TIMELET_INV_seconds
        # Indices of samples falling inside this slot: [start, end).
        idx = np.nonzero((set_val[0] >= utc_t_s) & (set_val[0] < utc_t_e))[0]

        if len(idx) < 2:
            # Need at least two samples to take a difference.
            diff_val = np.inf
        else:
            temp_val = abs(np.diff(set_val[1][idx]))
            # Keep only the largest (1-conf_lev) fraction of the diffs.
            upper_val = np.sort(temp_val)[int(np.floor(len(temp_val)*conf_lev)):]
            if len(upper_val) == 0:
                 diff_val = np.inf
            else:
                if num_type == FLOAT_TYPE:
                    diff_val = np.mean(upper_val)
                elif num_type == INT_TYPE:
                    diff_val = int(stats.mode(upper_val)[0])
                else:
                    log.error('Sample type must either INT or FLOAT type')
                    raise NameError('Sample type must either INT or FLOAT type')

            #diff_val=max(abs(diff(set_val[1][idx])))
            #sort(abs(diff(set_val[1][idx])))[::-1]

        diff_mean.append(diff_val)

    #diff_mean=np.array(diff_mean)[:,np.newaxis]
    diff_mean = np.array(diff_mean)
    return diff_mean
Пример #8
0
def apply_for_qualification(request):
    """Submit a qualification application for a member (POST: mem_id, addr_id)."""
    if request.method == 'POST':
        mem_id = request.POST.get('mem_id', '')
        addr_id = request.POST.get('addr_id', '')
        if not mem_id or not addr_id or len(mem_id) == 0 or len(addr_id) == 0:
            log.error('申请资质失败:无效的参数')
            status, msg = 400, '申请失败!'
        else:
            try:
                member = Members.objects.get(pk=mem_id)
                # qualification_code == 1 means an application is pending.
                if member.qualification_code == 1:
                    return JsonResponse({
                        'status': 400,
                        'msg': '您已申请资质,请勿重复申请!'
                    })
                member.area_id = addr_id
                member.qualification_code = 1
                member.save()
                status, msg = 200, '申请成功!'
            except Exception as e:
                log.error('申请资质失败:' + str(e))
                status, msg = 400, '申请失败!'
        return JsonResponse({'status': status, 'msg': msg})
Пример #9
0
def get_area(request):
    """Return every area address as JSON."""
    try:
        area_list = [{'id': a.id, 'area': a.area, 'addr': a.addr}
                     for a in AreaAddr.objects.all()]
        return JsonResponse({
            'status': 200,
            'msg': '操作成功!',
            'data': {'area_list': area_list}
        })
    except Exception as e:
        log.error('获取地址失败:' + str(e))
        return JsonResponse({'status': 400, 'msg': '操作失败!'})
Пример #10
0
def change_mem_status(request):
    """Update a member's qualification approval code (POST: id, code)."""
    if request.method == 'POST':
        mem_id = request.POST.get('id', '')
        code = request.POST.get('code', '')
        if not mem_id or not code or len(mem_id) == 0 or len(code) == 0:
            log.error('修改客户资质失败:无效的用户id')
            status, msg = 400, '操作失败!'
        else:
            try:
                member = Members.objects.get(pk=mem_id)
                member.qualification_code = code
                member.save()
                status, msg = 200, '操作成功!'
            except Exception as e:
                log.error('修改客户资质失败:' + str(e))
                status, msg = 400, '操作失败!'
        return JsonResponse({'status': status, 'msg': msg})
Пример #11
0
def save_diff_data_summary_json(bldg_key, sensor_names_hash, diff_out):
    """Dump one building's diff-feature summary to <JSON_DIR>/<bldg>_sensor_feature_diff.json.

    bldg_key: building identifier, lower-cased to form the file name.
    sensor_names_hash: maps raw sensor names to their UIDs.
    diff_out: dict with 'diffdata_state_mat' (2-D state matrix, one
        column per time step — assumed; TODO confirm axis meaning),
        'diffdata_exemplar' (exemplar name -> list of member names)
        and 'sensor_names'.
    NOTE(review): uses dict.iteritems and xrange, so this function is
    Python 2 only.  Any failure is logged and swallowed (best-effort).
    """
    from log_util import log
    import traceback
    try:

        XDIFF_Sensor_STATE = diff_out['diffdata_state_mat']
        #XDIFF_Weather_STATE = diff_out['diffdata_weather_mat']
        #XDIFF_Time_STATE = diff_out['diffdata_time_mat']

        #Xdiff_Time = diff_out['diff_time_slot']
        #diffdata_zvar = diff_out['diffdata_zvar']

        # Translate the exemplar map from raw names to UIDs.
        diffdata_exemplar = dict()
        for de, cl in diff_out['diffdata_exemplar'].iteritems():
            kuid = sensor_names_hash[de]
            clist = list()
            for cn in cl:
                vuid = sensor_names_hash[cn]
                clist.append(vuid)
            diffdata_exemplar.update({kuid: clist})

        # Translate the sensor-name list to UIDs, preserving order.
        XDIFF_Sensor_NAMES = list()
        for dn in diff_out['sensor_names']:
            uid = sensor_names_hash[dn]
            XDIFF_Sensor_NAMES.append(uid)

        #X_Weather_NAMES = diff_out['weather_names']
        #X_Time_NAMES = diff_out['time_names']

        num_cols = XDIFF_Sensor_STATE.shape[1]

        # Serialize the state matrix column by column.
        x_states = list()
        for c in xrange(0, num_cols):
            x_states.append(XDIFF_Sensor_STATE[:, c].tolist())

        x_sensor_diff = {"sensor-names": XDIFF_Sensor_NAMES
                        ,"sensor-exemplar": diffdata_exemplar
                        ,"sensor-state": x_states}

        with open(JSON_DIR + bldg_key.lower() + "_sensor_feature_diff.json", 'w') as f:
            f.write(json.dumps(x_sensor_diff))

    except Exception as e:
        log.error(traceback.print_exc())
        log.error(str(e))
Пример #12
0
def login(request):
    """Member login: verify credentials and stash the user in the session."""
    if request.method == 'POST':
        name = request.POST.get('username', '')
        pwd = request.POST.get('password', '')

        if not name or not pwd or len(name) == 0 or len(pwd) == 0:
            status, msg = 400, '用户名或口令不能为空!'
        else:
            try:
                matches = Members.objects.filter(name=name,
                                                 auth_str=make_pwd(pwd))
                if not matches.exists():
                    status, msg = 400, '用户名或密码错误'
                else:
                    login_user = matches.first()
                    if login_user.activate == 1:
                        # Remember the authenticated member in the session.
                        request.session['login_mem_user'] = {
                            'id': login_user.id,
                            'name': login_user.name
                        }
                        return JsonResponse({
                            'status': 200,
                            'msg': '登录成功!',
                            'data': {'id': login_user.id}
                        })
                    status, msg = 400, '您的会员申请尚未通过,请与后台管理员联系!'
            except Exception as e:
                log.error('用户登录失败:' + str(e))
                status, msg = 400, '登录失败!'

        return JsonResponse({'status': status, 'msg': msg})
Пример #13
0
def download_compact(request):
    """
    Download a contract template by file name (GET: file_name).

    Streams the file as an attachment.  Bug fixes: an invalid file name
    previously only logged and then tried to open '' anyway, and every
    failure path returned None (no HTTP response at all); both now return
    an error JsonResponse.
    """
    file_name = request.GET.get('file_name', '')
    if any((not file_name, len(file_name) == 0)):
        log.error('下载合同模板失败:无效的文件名')
        return JsonResponse({'status': 400, 'msg': '下载失败!'})
    try:
        # FileResponse takes ownership of the handle and closes it.
        f = open(BASE_DIR + '/static/media/compact/' + file_name, 'rb')
        response = FileResponse(f)
        response['Content-Type'] = 'application/octet-stream'
        response[
            'Content-Disposition'] = f'attachment;filename="{urllib.parse.quote(file_name)}"'
        return response
    except Exception as e:
        log.error('下载合同模板失败:' + str(e))
        return JsonResponse({'status': 400, 'msg': '下载失败!'})
Пример #14
0
def get_compact(request):
    """List the file names of all contract templates."""
    try:
        templates = os.listdir(BASE_DIR + '/static/media/compact')
        return JsonResponse({
            'status': 200,
            'msg': '获取成功!',
            'data': {'compact_list': templates}
        })
    except Exception as e:
        log.error('获取模板文件列表失败:' + str(e))
        return JsonResponse({'status': 400, 'msg': '获取失败!'})
Пример #15
0
def pp_verify_sensor_data_format(tup):
    """Scan one sensor's sample lists and queue coordinates of bad samples.

    A sample is bad when it is an empty list or not an int/float; its
    [key, row, column] is put on the queue and logged.
    """
    key, data_list, time_slots, q = tup

    log.info(' checking ' + key + '...')

    try:
        for i, samples in enumerate(data_list):
            for j, each_sample in enumerate(samples):
                # Same action for both defect kinds; an empty list is
                # never an int/float, so one merged test is equivalent.
                if each_sample == [] or not isinstance(each_sample, (int, float)):
                    q.put([key, i, j])
                    log.info(str(each_sample) + ' at ' + str(time_slots[i]) + ' in ' + str(key))

    except Exception as e:
        log.error(str(e))
Пример #16
0
def pp_verify_sensor_data_format(tup):
    """Scan one sensor's sample lists and queue coordinates of bad samples.

    A sample is bad when it is an empty list or not an int/float; its
    [key, row, column] is put on the queue and logged.
    """
    (key, data_list, time_slots, q) = tup

    log.info(' checking ' + key + '...')

    try:
        for i, samples in enumerate(data_list):
            for j, each_sample in enumerate(samples):

                if each_sample == []:
                    q.put([key, i, j])
                    log.info(str(each_sample) + ' at ' + str(time_slots[i]) + ' in ' + str(key))

                elif not isinstance(each_sample, int) and not isinstance(each_sample, float):
                    q.put([key, i, j])
                    log.info(str(each_sample) + ' at ' + str(time_slots[i]) + ' in ' + str(key))

    except Exception as e:
        # Bug fix: traceback.print_exc() returns None, so the original
        # logged 'None'; format_exc() returns the traceback text.
        log.error(traceback.format_exc())
        log.error(str(e))
Пример #17
0
def save_avg_data_summary_json(bldg_key, sensor_names_hash, avg_out):
    """Dump one building's avg-feature summary to <JSON_DIR>/<bldg>_sensor_feature_avg.json.

    bldg_key: building identifier, lower-cased to form the file name.
    sensor_names_hash: maps raw sensor names to their UIDs.
    avg_out: dict with 'avgdata_state_mat' (2-D state matrix),
        'avgdata_exemplar' (exemplar name -> list of member names)
        and 'sensor_names'.
    NOTE(review): uses dict.iteritems and xrange, so this function is
    Python 2 only.  Any failure is logged and swallowed (best-effort).
    """
    from log_util import log
    import traceback

    try:
        X_Sensor_STATE = avg_out['avgdata_state_mat']
        #X_Weather_STATE = avg_out['avgdata_weather_mat']

        # Translate the exemplar map from raw names to UIDs.
        avgdata_exemplar = dict()
        for ae, cl in avg_out['avgdata_exemplar'].iteritems():
            kuid = sensor_names_hash[ae]
            clist = list()
            for cn in cl:
                vuid = sensor_names_hash[cn]
                clist.append(vuid)
            avgdata_exemplar.update({kuid: clist})

        #X_Sensor_NAMES = avg_out['sensor_names']
        #X_Weather_NAMES = avg_out['weather_names']

        # Translate the sensor-name list to UIDs, preserving order.
        X_Sensor_NAMES = list()
        for an in avg_out['sensor_names']:
            uid = sensor_names_hash[an]
            X_Sensor_NAMES.append(uid)

        num_cols = X_Sensor_STATE.shape[1]

        # Serialize the state matrix column by column.
        x_states = list()
        for c in xrange(0, num_cols):
            x_states.append(X_Sensor_STATE[:, c].tolist())

        x_sensor_avg = {"sensor-names": X_Sensor_NAMES
                        ,"sensor-exemplar": avgdata_exemplar
                        ,"sensor-state": x_states}

        with open(JSON_DIR + bldg_key.lower() + "_sensor_feature_avg.json", 'w') as f:
            f.write(json.dumps(x_sensor_avg))

    except Exception as e:
        log.error(traceback.print_exc())
        log.error(str(e))
Пример #18
0
def save_processed_json(sensor_names_hash, ds_out):
    """Write one preproc-<uid>.json per sensor: rows of [timestamp, avg, diff].

    Aligns the avg and diff feature matrices on the shorter of their two
    time axes before pairing values per sensor.
    NOTE(review): uses xrange, so this function is Python 2 only.
    Any failure is logged and swallowed (best-effort).
    """
    from log_util import log
    import traceback
    try:
        diff_name_list = ds_out['diffdata_dict']['build_diff_matrix_out'].Xdiff_Names

        # (timestamps, values) pairs for the avg and diff features.
        avg_ts, avg_val = ts_npval(ds_out['avgdata_dict']['build_feature_matrix_out'].X_Time,
                                    ds_out['avgdata_dict']['build_feature_matrix_out'].X_Feature)

        diff_ts, diff_val = ts_npval(ds_out['diffdata_dict']['build_diff_matrix_out'].Xdiff_Time,
                                    ds_out['diffdata_dict']['build_diff_matrix_out'].Xdiff_Mat)

        # Index the longer time axis by the timestamps of the shorter one,
        # so both series can be read off the common (shorter) axis.
        ts_indices = list()
        ts_list = None
        if avg_ts.shape[0] < diff_ts.shape[0]:
            ts_indices = [np.nonzero(diff_ts == ts)[0] for ts in avg_ts]
            ts_list = avg_ts.tolist()
        else:
            ts_indices = [np.nonzero(avg_ts == ts)[0] for ts in diff_ts]
            ts_list = diff_ts.tolist()

        for i, name in enumerate(diff_name_list):

            # Subsample whichever series is on the longer axis.
            avg = avg_val[:, i].tolist()
            if len(ts_indices) < avg_val[:, i].shape[0]:
                avg = avg_val[:, i][ts_indices][:, 0].tolist()

            diff = diff_val[:, i].tolist()
            if len(ts_indices) < diff_val[:, i].shape[0]:
                diff = diff_val[:, i][ts_indices][:, 0].tolist()

            # One [timestamp, avg, diff] row per aligned time slot.
            json_out = list()
            for t_idx in xrange(0, len(ts_indices)):
                json_out.append([ts_list[t_idx], avg[t_idx], diff[t_idx]])

            uid = sensor_names_hash[name]
            with open(JSON_DIR + "preproc-" + uid + ".json", 'w') as f:
                f.write(json.dumps(json_out))
    except Exception as e:
        log.error(traceback.print_exc())
        log.error(str(e))
Пример #19
0
def delete_compact(request):
    """
    Delete a contract template by file name (GET: file_name).

    Bug fix: the except branch returned status 200 (success) even though
    the deletion failed; it now returns 400.
    """
    file_name = request.GET.get('file_name', '')
    if any((not file_name, len(file_name) == 0)):
        log.error('合同模板删除失败:无效的文件名!')
        msg = '删除失败!'
        status = 400
    else:
        try:
            os.remove(BASE_DIR + '/static/media/compact/' + file_name)
            msg = '删除成功!'
            status = 200
        except Exception as e:
            log.error('合同模板删除失败:' + str(e))
            msg = '删除失败!'
            status = 400
    return JsonResponse({'status': status, 'msg': msg})
Пример #20
0
def compute_joint_prob(data_mat, state_idx_set, state_val_set):
    """
    Empirical joint probability that, for every k, column state_idx_set[k]
    of data_mat takes one of the values in state_val_set[k].

    data_mat: 2-D array, rows are samples, columns are state variables.
    Returns len(matching rows) / num rows, or 0 for an empty matrix.
    Raises NameError when the two set arguments differ in length.

    Bug fix: the original divided by num_samples BEFORE testing
    num_samples == 0, so an empty matrix raised ZeroDivisionError
    instead of returning 0; the guard now comes first.
    """
    num_samples = data_mat.shape[0]

    if len(state_idx_set) != len(state_val_set):
        log.error('the length of state_set and state_val must be same')
        raise NameError('the length of state_set and state_val must be same')

    if num_samples == 0:
        return 0

    # Intersect, per state, the rows matching any of its allowed values.
    joint_idx = set(range(num_samples))
    for k, state_idx in enumerate(state_idx_set):
        samples = data_mat[:, state_idx]
        sub_joint_idx = set()
        for state_val in state_val_set[k]:
            sub_joint_idx |= set(np.nonzero(samples == state_val)[0])
        joint_idx &= sub_joint_idx

    return len(joint_idx) / num_samples
Пример #21
0
def change_pwd(request):
    """
    Change a member's password (POST: mem_id, old_pwd, new_pwd1, new_pwd2).

    Bug fix: old_pwd was collected and validated as non-empty but never
    checked against the stored hash, so anyone knowing a mem_id could
    overwrite the password; it is now verified before the update.
    """
    if request.method == 'POST':
        mem_id = request.POST.get('mem_id', '')
        old_pwd = request.POST.get('old_pwd', '')
        new_pwd1 = request.POST.get('new_pwd1', '')
        new_pwd2 = request.POST.get('new_pwd2', '')
        if any((not mem_id, not old_pwd, not new_pwd1, not new_pwd2,
                len(mem_id) == 0, len(new_pwd1) == 0, len(new_pwd2) == 0)):
            log.error('密码修改失败:无效的参数')
            msg = '操作失败!'
            status = 400
        elif new_pwd1 != new_pwd2:
            log.error('密码修改失败:两次密码输入不一致!')
            msg = '两次输入的密码不一致!'
            status = 400
        else:
            try:
                member = Members.objects.get(pk=mem_id)
                if member.auth_str != make_pwd(old_pwd):
                    # Reject the change when the current password is wrong.
                    log.error('密码修改失败:原密码错误')
                    msg = '操作失败!'
                    status = 400
                else:
                    member.auth_str = make_pwd(new_pwd1)
                    member.save()
                    msg = '操作成功!'
                    status = 200
            except Exception as e:
                log.error('密码修改失败:' + str(e))
                msg = '操作失败!'
                status = 400
        return JsonResponse({'status': status, 'msg': msg})
Пример #22
0
def get_qualification(request):
    """Report the qualification-application status for one member (GET: mem_id)."""
    mem_id = request.GET.get('mem_id', '')
    if not mem_id or len(mem_id) == 0:
        log.error('获取用户资质失败:无效的参数')
    else:
        try:
            member = Members.objects.get(pk=mem_id)
            return JsonResponse({
                'status': 200,
                'msg': '查询成功!',
                'data': {'status': member.qualification.name}
            })
        except Exception as e:
            log.error('获取用户资质失败:' + str(e))
    # Shared failure response for both the invalid-id and error paths.
    return JsonResponse({'status': 400, 'msg': '查询失败!'})
Пример #23
0
def upload_pic(request):
    """Save a qualification picture and record it for the member (POST: mem_id, pic)."""
    if request.method == 'POST':
        mem_id = request.POST.get('mem_id', None)
        pic = request.FILES.get('pic', None)
        if any((not pic, not mem_id, len(mem_id) == 0)):
            log.error('照片上传失败:无效的参数')
            status, msg = 400, '上传失败!'
        else:
            try:
                # Stored file name is prefixed with the member id.
                stored_name = '{}_{}'.format(mem_id, pic.name)
                member_material = MemberMaterials.objects.create(
                    mem_id=mem_id, materials_name=stored_name)

                dest = BASE_DIR + '/static/media/pic/{}_{}'.format(
                    mem_id, pic.name)
                with open(dest, 'wb+') as f:
                    for chunk in pic.chunks():
                        f.write(chunk)
                member_material.save()
                return JsonResponse({
                    'status': 200,
                    'msg': '上传成功!',
                    'data': {
                        'pic_url':
                        '/s/media/pic/{}_{}'.format(mem_id, pic.name)
                    }
                })
            except Exception as e:
                log.error('照片上传失败:' + str(e))
                status, msg = 400, '上传失败!'
        return JsonResponse({'status': status, 'msg': msg})
Пример #24
0
def add_addr(request):
    """Create a new area address (POST: area, addr)."""
    if request.method == 'POST':
        area = request.POST.get('area', '')
        addr = request.POST.get('addr', '')
        if not area or not addr or len(area) == 0 or len(addr) == 0:
            status, msg = 400, '区域或详细地址不能为空!'
        else:
            try:
                AreaAddr.objects.create(area=area, addr=addr)
                status, msg = 200, '添加成功'
            except Exception as e:
                log.error('地址添加失败:' + str(e))
                status, msg = 400, '地址添加失败!'
        return JsonResponse({'status': status, 'msg': msg})
Пример #25
0
def delete_addr(request):
    """Delete one area address by primary key (GET: id)."""
    addr_id = request.GET.get('id', '')
    if not addr_id or len(addr_id) == 0:
        log.error('地址删除失败:无效的id')
        status, msg = 400, '删除失败!'
    else:
        try:
            AreaAddr.objects.get(pk=addr_id).delete()
            status, msg = 200, '删除成功!'
        except Exception as e:
            log.error('地址删除失败:' + str(e))
            status, msg = 400, '删除失败!'
    return JsonResponse({'status': status, 'msg': msg})
Пример #26
0
def compute_joint_prob(data_mat, state_idx_set, state_val_set):
    """
    Empirical joint probability that, for every k, column state_idx_set[k]
    of data_mat takes one of the values in state_val_set[k].

    data_mat: 2-D array, rows are samples, columns are state variables.
    Returns len(matching rows) / num rows, or 0 for an empty matrix.
    Raises NameError when the two set arguments differ in length.

    Bug fix: the original divided by num_samples BEFORE testing
    num_samples == 0, so an empty matrix raised ZeroDivisionError
    instead of returning 0; the guard now comes first.
    """
    num_samples = data_mat.shape[0]

    if len(state_idx_set) != len(state_val_set):
        log.error('the length of state_set and state_val must be same')
        raise NameError('the length of state_set and state_val must be same')

    if num_samples == 0:
        return 0

    # Intersect, per state, the rows matching any of its allowed values.
    joint_idx = set(range(num_samples))
    for k, state_idx in enumerate(state_idx_set):
        samples = data_mat[:, state_idx]
        sub_joint_idx = set()
        for state_val in state_val_set[k]:
            sub_joint_idx |= set(np.nonzero(samples == state_val)[0])
        joint_idx &= sub_joint_idx

    return len(joint_idx) / num_samples
Пример #27
0
def all_mem_apply_for(request):
    """Render the paginated list of members awaiting qualification review."""
    try:
        pending = Members.objects.filter(qualification_code=1).order_by('-id')
        paginator = Paginator(pending, 8, 0)
        requested = request.GET.get('page_num', '')
        # Clamp the requested page into the valid range; default to page 1.
        if not requested:
            page = paginator.page(1)
        elif int(requested) <= paginator.num_pages:
            page = paginator.page(requested)
        else:
            page = paginator.page(paginator.num_pages)
        return render(request, 'sys_user/mem_apply_for_mgr.html',
                      {'page': page, 'paginator': paginator})
    except Exception as e:
        log.error('查询所有资质待审核客户失败:' + str(e))
Пример #28
0
def get_initiation_mem(request):
    """Render the paginated list of members applying for membership."""
    try:
        applicants = Members.objects.filter(activate=0).order_by('-id')
        paginator = Paginator(applicants, 8, 0)
        requested = request.GET.get('page_num', '')
        # Clamp the requested page into the valid range; default to page 1.
        if not requested:
            page = paginator.page(1)
        elif int(requested) <= paginator.num_pages:
            page = paginator.page(requested)
        else:
            page = paginator.page(paginator.num_pages)
        return render(request, 'sys_user/mem_join_mgr.html',
                      {'page': page, 'paginator': paginator})
    except Exception as e:
        log.error('获取所有申请入会会员失败:' + str(e))
Пример #29
0
def compact_mgr(request):
    """Render the paginated contract-template management page."""
    try:
        templates = os.listdir(BASE_DIR + '/static/media/compact')
        paginator = Paginator(templates, 8, 0)
        requested = request.GET.get('page_num', '')
        # Clamp the requested page into the valid range; default to page 1.
        if not requested:
            page = paginator.page(1)
        elif int(requested) <= paginator.num_pages:
            page = paginator.page(requested)
        else:
            page = paginator.page(paginator.num_pages)
        return render(request, 'sys_user/compact_mgr.html',
                      {'page': page, 'paginator': paginator})
    except Exception as e:
        log.error('获取模板文件列表失败:' + str(e))
Пример #30
0
def search_mem_qualification(request):
    """
    Search members by name/phone/area keyword and render a paged list
    (GET: key, page_num).  Without a keyword, all members are listed.

    Bug fix: removed a leftover debug print() that dumped the queryset
    to stdout on every keyword search.
    """
    key = request.GET.get('key', '')
    members = []
    if key:
        try:
            # Match the keyword against area/addr text, then against
            # member name, exact phone, or membership of a matched area.
            areas = AreaAddr.objects.filter(
                Q(area__contains=key) | Q(addr__contains=key))
            areas_id = [area.id for area in areas]
            members = Members.objects.filter(Q(name__contains=key) | Q(phone=key) | Q(area_id__in=areas_id)) \
                .order_by('-id')
        except Exception as e:
            log.error('查询客户资质失败:' + str(e))
    else:
        try:
            members = Members.objects.all().order_by('-id')
        except Exception as e:
            log.error('查询客户资质失败:' + str(e))
    paginator = Paginator(members, 8, 0)
    page_num = request.GET.get('page_num', '')
    # Clamp the requested page into the valid range; default to page 1.
    if page_num:
        if int(page_num) <= paginator.num_pages:
            page = paginator.page(page_num)
        else:
            page = paginator.page(paginator.num_pages)
    else:
        page = paginator.page(1)
    return render(request, 'sys_user/mem_qualification_search.html', {
        'page': page,
        'paginator': paginator,
        'key': key
    })
Пример #31
0
def to_area_mgr(request):
    """Render the paginated area-address management page."""
    try:
        addresses = AreaAddr.objects.all().order_by('id')
        paginator = Paginator(addresses, 8, 0)
        requested = request.GET.get('page_num', '')
        # Clamp the requested page into the valid range; default to page 1.
        if not requested:
            page = paginator.page(1)
        elif int(requested) <= paginator.num_pages:
            page = paginator.page(requested)
        else:
            page = paginator.page(paginator.num_pages)
        return render(request, 'sys_user/area_mgr.html',
                      {'page': page, 'paginator': paginator})
    except Exception as e:
        log.error('地址查询失败:' + str(e))
Пример #32
0
def activate_member(request):
    """
    Approve or reject a membership application (GET: id, code).

    Bug fix: the except branch returned status 200 (success) even though
    the update failed; it now returns 400.
    """
    mem_id = request.GET.get('id', '')
    code = request.GET.get('code', '')
    if any((not mem_id, not code, len(mem_id) == 0, len(code) == 0)):
        log.error('会员激活失败:参数不能为空!')
        msg = '操作失败!'
        status = 400
    else:
        try:
            member = Members.objects.get(pk=mem_id)
            member.activate = code
            member.save()
            msg = '操作成功!'
            status = 200
        except Exception as e:
            log.error('会员激活失败:' + str(e))
            msg = '操作失败!'
            status = 400
    return JsonResponse({'status': status, 'msg': msg})
Пример #33
0
def build_feature_matrix(data_dict, sensor_list, weather_list, time_slots, interpolation=1, max_num_succ_idx_for_itpl=4):
    """
    Build the per-time-slot feature matrix X from sensor and weather data.

    For each measurement in ``sensor_list + weather_list`` a per-slot
    feature vector is extracted, outliers are flagged as np.inf, missing
    samples are optionally interpolated, and float-typed values are
    normalized. Columns whose normalization fails (output_status == -1,
    e.g. near-zero variance) are excluded from X and reported separately.
    Finally, every time slot that still contains an inf in any column is
    removed from both X and ``time_slots``.

    :param data_dict: mapping name -> measurement record; ``[key][1]``
        holds the samples fed to get_feature() and ``[key][2][1]`` a value
        used for type detection — assumed from usage here, TODO confirm.
    :param sensor_list: sensor measurement names (processed first).
    :param weather_list: weather measurement names (appended after the
        sensors; recognized by index ``j >= len(sensor_list)``).
    :param time_slots: sequence of time slots; defines the row count of X.
    :param interpolation: 1 to interpolate gaps, otherwise slots with any
        missing sample are simply removed at the end.
    :param max_num_succ_idx_for_itpl: max number of successive missing
        slots that may be interpolated.
    :return: tuple ``(X, new_time_slot, input_names, zero_var_list,
        zero_var_val, INT_type_list, INT_type_idx, FLOAT_type_list,
        FLOAT_type_idx, weather_type_idx, sensor_type_idx)`` where every
        ``*_idx`` list indexes columns of X.
    :raises NameError: on type/shape inconsistencies detected at the end.
    """
    data_used = sensor_list + weather_list
    log.info('Build data feature matrix now.....')

    if interpolation == 1:
        log.info('Missing samples will be interpolated upto ' + str(max_num_succ_idx_for_itpl) + 'successive time slots')
    else:
        log.info('All time slots with any missing sample will be removed without interpolatoin ')

    num_of_data = len(data_used)
    num_of_samples = len(time_slots)

    # Declare as 2-d list for exception.
    X = list()
    INT_type_list = list()
    FLOAT_type_list = list()
    input_names = list()
    weather_type_idx = list()
    sensor_type_idx = list()
    INT_type_idx = list()
    FLOAT_type_idx = list()
    zero_var_list = list()      # names of excluded (zero-variance) columns
    zero_var_val = list()       # their (failed) normalized values, for reference


    # whose variance is zero, hence carry no information,
    # Constrcut X matrix by summerizing hourly samples
    for j, key in enumerate(data_used):
        log.info('-' * 40)
        log.info('building for ' + str(key))

        try:
            num_type = check_data_type(data_dict[key][2][1])

            # Avg. value feature
            x_temp = get_feature(data_dict[key][1], num_type)

            # Indices of slots that actually have a sample (inf marks "missing").
            non_inf_idx = np.nonzero(x_temp < np.inf)[0]
            #if non_inf_idx <len(time_slots):measurement_point_set

            # Outlier removal, different parameters for sensors and weather data
            if len(sensor_list) <= j:
                # weather data
                is_weather_data = True
                outlier_idx = outlier_detect(x_temp[non_inf_idx], 5, 10)
            else:
                is_weather_data = False
                outlier_idx = outlier_detect(x_temp[non_inf_idx], 1, 20)

            if len(outlier_idx) > 0:
                log.info('outlier samples are detected: outlier_idx:' + str(outlier_idx))
                # Outliers are re-marked as inf so they are treated as missing.
                x_temp[non_inf_idx[outlier_idx]] = np.inf
            
            # interplolation data, use nearest for int type, use linear for float type
            if interpolation == 1:
                x_temp = interploate_data(x_temp, num_type, max_num_succ_idx_for_itpl)

            # NOTE(review): x_temp is indexed 2-d here ([:, 0]) — assumes
            # get_feature/interploate_data return a column vector; confirm.
            norm_data_vec, output_status = normalize_data(x_temp[:, 0])
            if len(np.nonzero(norm_data_vec == np.inf)[0]) > num_of_samples/5:
                # NOTE(review): bare `raise` with no active exception raises
                # RuntimeError, which the handler below converts into a
                # "-1 / excluded" status. Works, but likely intended to be an
                # explicit raise with a message — confirm before changing.
                raise

        except Exception as e:
            # Any failure excludes this measurement rather than aborting.
            log.error(traceback.print_exc())
            log.error(' Error in processing data feature, excluded from analysis ' + str(e))
            output_status = -1
            norm_data_vec = None

        if output_status == -1:
            zero_var_list.append(key)
            zero_var_val.append(norm_data_vec)
            log.info('too small variance for float type, added to zero var list')

        else:
            input_names.append(key)
            log.info(str(j)+'th sensor update')

            # Non-weather float data keeps the normalized vector; int data
            # and all weather data keep the raw (summarized) values.
            if (num_type == FLOAT_TYPE) and (is_weather_data == False):
                X.append(norm_data_vec)
                FLOAT_type_idx.append(len(X)-1)
                FLOAT_type_list.append(key)

            elif (num_type == INT_TYPE) or (is_weather_data == True):
                X.append(x_temp[:, 0])
                INT_type_idx.append(len(X)-1)
                INT_type_list.append(key)

            else:
                log.error('Sample type must either INT or FLOAT type')
                raise NameError('Sample type must either INT or FLOAT type')

            if key in weather_list:
                weather_type_idx.append(len(X)-1)

            elif key in sensor_list:
                sensor_type_idx.append(len(X)-1)
            else:
                log.error('Sample type must either Weather or Sensor type')
                raise NameError('Sample type must either Weather or Sensor type')

    # Linear Interpolate
    # Columns were accumulated as rows; transpose to samples-by-measurements.
    X = np.array(X).T
    if X.shape[0] != num_of_samples:
        log.error('The numeber of rows in feature matrix and the number of the time slots are  different ')
        raise NameError('The numeber of rows in feature matrix and the number of the time slots are  different ')

    if X.shape[1]+len(zero_var_list) != num_of_data:
        log.error('The sume of the numeber of column in feature matrix  and the number of zero var column are  different from the number of input measurements ')
        raise NameError('The sume of the numeber of column in feature matrix  and the number of zero var column are  different from the number of input measurements ')

    deleted_timeslot_idx=[]
    log.info('-' * 20)
    log.info('removing time slots having no sample...')
    # Collect every row index that is still inf in ANY column; those time
    # slots are dropped from both X and the time-slot list.
    inf_idx_set = []
    for col_vec in X.T:
        inf_idx = np.nonzero(col_vec ==np.infty)[0]
        inf_idx_set = np.r_[inf_idx_set, inf_idx]
    inf_col_idx = list(set(list(inf_idx_set)))
    deleted_timeslot_idx = np.array([int(x) for x in inf_col_idx])

    log.info('time slots ' + str(deleted_timeslot_idx) + ' removed...')
    log.info('-' * 20)
    X = np.delete(X, deleted_timeslot_idx, axis=0)
    new_time_slot = np.delete(time_slots, deleted_timeslot_idx)

    # Checking whether it has any ill entry value
    verify_data_mat(X)

    return X, new_time_slot, input_names, zero_var_list, zero_var_val, INT_type_list, INT_type_idx, FLOAT_type_list, FLOAT_type_idx, weather_type_idx, sensor_type_idx
Пример #34
0
def plotting_bldg_bn(bldg):
    """
    Render and save Bayesian-network figures for every analysis output of
    *bldg*: one sensors / time / weather / combined graph plus a peak
    likelihood plot per probe name and signal tag. Failures of individual
    figures are logged and skipped so the remaining plots still get saved.
    """
    plt.ioff()

    log.info('Getting anal_out from ' + bldg.bldg_tag)

    def _draw_bn_graph(bn_prob, title_prefix, p_name, sig_tag, hc_key, label_key, file_suffix):
        # Draw one hill-climbing BN graph and save it under FIG_DIR;
        # any error is logged and swallowed.
        fig_name = title_prefix + p_name
        try:
            plt.figure(fig_name, figsize=(30.0, 30.0))
            rbn.nx_plot(bn_prob[hc_key], bn_prob[label_key], graph_layout='spring', node_text_size=30)
            out_path = FIG_DIR + bldg.bldg_tag + '_' + p_name + '_' + sig_tag + file_suffix + get_pngid() + '.png'
            plt.savefig(out_path, bbox_inches='tight')
            plt.close()
        except Exception as e:
            log.error(traceback.print_exc())
            log.error('error in ' + fig_name + ' ' + str(e))

    try:
        for sig_tag, anal_out in bldg.anal_out.iteritems():
            for bn_prob in anal_out:
                p_name = bn_prob['p_name']

                # The four BN graphs differ only in their keys and names.
                _draw_bn_graph(bn_prob, 'BN for Sensors ', p_name, sig_tag,
                               's_hc', 's_labels', '_bn_sensors')
                _draw_bn_graph(bn_prob, 'BN for Time ', p_name, sig_tag,
                               't_hc', 't_labels', '_bn_time')
                _draw_bn_graph(bn_prob, 'BN for Weather ', p_name, sig_tag,
                               'w_hc', 'w_labels', '_bn_weather')
                _draw_bn_graph(bn_prob, 'BN for Sensor-Time-Weather ', p_name, sig_tag,
                               'all_hc', 'all_labels', '_bn_sensor_time_weather')

                # Peak likelihood plot has its own layout, kept inline.
                fig_name = 'BN PEAK LH Analysis for Sensor-Time-Weather ' + p_name
                try:
                    plt.figure(fig_name, figsize=(30.0, 30.0))
                    plt.subplot(2, 1, 1)
                    xticks = bn_prob['all_cause_symbol_xtick']
                    plt.plot(xticks, bn_prob['high_peak_prob'], '-^')
                    plt.plot(xticks, bn_prob['low_peak_prob'], '-.v')
                    plt.ylabel('Likelihood', fontsize=20)

                    plt.xticks(xticks, bn_prob['all_cause_symbol_xlabel'], rotation=270, fontsize=20)
                    plt.tick_params(labelsize=20)
                    plt.legend(('High Peak', 'Low Peak'), loc='center right', prop={'size':25})
                    plt.tick_params(labelsize=20)

                    plt.grid()
                    plt.ylim([-0.05,1.05])
                    plt.title('Likelihood of '+ str(remove_dot(p_name))+' given '+'\n'+str(remove_dot(bn_prob['all_cause_label'])), fontsize=20)
                    plt.savefig(FIG_DIR + bldg.bldg_tag + '_' + p_name + '_' + sig_tag + '_LH_sensor_time_weather' + get_pngid() + '.png', bbox_inches='tight')
                    plt.close()
                except Exception as e:
                    log.error(traceback.print_exc())
                    log.error('error in ' + fig_name + ' ' + str(e))

    except Exception as e:
        log.error(traceback.print_exc())
        log.error(str(e))

    plt.ion()
Пример #35
0
def plotting_bldg_lh(bldg, bldg_key=None, attr='sensor', num_picks=30):
    """
    Plot the highest-likelihood *attr* attributes per probe name of *bldg*
    and save one PNG per probe and signal tag ('avg' and 'diff').

    :param bldg: building object exposing .bldg_tag, .sigtags and .analysis.
    :param bldg_key: optional tag restricting plotting to one building;
        falsy (default) means the building's own tag.
    :param attr: attribute family to rank ('sensor', 'weather', ...).
    :param num_picks: number of top-likelihood attributes to plot.
        Bug fix: this parameter was previously ignored (hard-coded to 30
        inside the loop).
    """
    log.info('-' * 40)
    log.info('plotting lh for ' + attr)
    log.info('-' * 40)
    sig_tag_set = ['avg', 'diff']
    plt.ioff()

    # bldg_key used to default to a mutable []; None is equally falsy and
    # avoids the shared-mutable-default pitfall.
    if not bldg_key:
        bldg_tag_set = [bldg.bldg_tag]
    else:
        bldg_tag_set = [bldg_key]

    for bldg_tag in bldg_tag_set:
        if bldg_tag != bldg.bldg_tag:
            continue

        log.info('-' * 40)
        log.info(bldg_tag + " is to be plotted...")
        log.info('-' * 40)

        for sig_tag in sig_tag_set:
            try:
                p_names = bldg.sigtags[sig_tag].p_names

                for pname in p_names:
                    # Dots are not file-name friendly; normalize them.
                    # (replace() is a no-op when there is no dot, so the
                    # old try/except around pname.index('.') is unneeded.)
                    pname = pname.replace('.', '_')

                    optprob_set = None
                    optstate_set = None
                    for anal in bldg.analysis[sig_tag]:
                        if anal.sensor_tag == pname:
                            optprob_set = anal.attrs[attr].optprob_set
                            optstate_set = anal.attrs[attr].optstate_set
                            break

                    if optprob_set is None:
                        # Bug fix: previously np.argsort(None) raised here
                        # and, because the try wraps the whole pname loop,
                        # aborted every remaining probe of this sig_tag.
                        log.error('no ' + attr + ' analysis found for ' + pname + ', skipped')
                        continue

                    s_names = bldg.sigtags[sig_tag].names[attr]

                    # Rank attributes by likelihood, descending.
                    sort_idx = np.argsort(optprob_set)[::-1]
                    sort_lh = optprob_set[sort_idx[:num_picks]].T
                    x_label = list(np.array(s_names)[sort_idx[:num_picks]])
                    x_ticks = range(len(x_label))

                    plt.figure(figsize=(20.0, 15.0))
                    plt.subplot(2, 1, 1)
                    plt.plot(sort_lh, '-*')
                    plt.xticks(x_ticks, x_label, rotation=270, fontsize="small")
                    if sig_tag == 'avg':
                        plt.title('Most relavant ' + attr + ' attributes to the peak (demand) of '+ pname, fontsize=20)
                    else:
                        plt.title('Most relavant ' + attr + ' attributes to the peak variations of '+ pname, fontsize=20)
                    plt.tick_params(labelsize='large')
                    plt.ylim([-0.05, 1.05])
                    plt.ylabel('Likelihood (From 0 to 1)', fontsize=18)
                    plt.savefig(FIG_DIR + bldg_tag + '_' + pname + '_' + attr + '_' + sig_tag + '_lh_sensors.png', bbox_inches='tight')
                    plt.close()

            except Exception as e:
                log.error(traceback.print_exc())
                log.error(str(e))
    plt.close()
    plt.ion()
Пример #36
0
def data_summerization(bldg_key, data_dict, proc_avg=True, proc_diff=True, PARALLEL=False):
    """
    Summarize a building's measurements into discrete-state matrices.

    With ``proc_avg``, builds the hourly average feature matrix (regular
    events): clusters float/int sensor columns, discretizes sensor and
    weather columns into integer states, adds month/weekday/hour time
    states, and stores everything under ``dsout['avgdata_dict']``.  With
    ``proc_diff``, builds the differential feature matrix (irregular
    events) restricted to the time slots shared with the avg matrix and
    stores it under ``dsout['diffdata_dict']``.

    NOTE(review): the proc_diff branch reads X_Time, X_Weather_STATE,
    wf_state, wi_state, X_Weather_NAMES and X_Time_NAMES, which exist only
    when the proc_avg branch ran AND produced a non-empty X_Feature;
    calling with proc_avg=False and proc_diff=True raises NameError.
    Confirm callers always enable both.

    NOTE(review): ``dict(a.items() + b.items())`` and ``range()`` usage
    indicate this module targets Python 2.

    :param bldg_key: building key; stored (dots removed) as dsout['bldg_key'].
    :param data_dict: dict with 'time_slots', 'Conditions_dict',
        'Events_dict', 'sensor_list', 'weather_list' plus per-measurement
        entries consumed by build_feature_matrix().
    :param proc_avg: process average (regular-event) features.
    :param proc_diff: process differential (irregular-event) features.
    :param PARALLEL: forwarded to the parallelizable helpers.
    :return: dict with 'data_dict', 'bldg_key' and, when produced,
        'avgdata_dict' and 'diffdata_dict'.
    """
    time_slots = data_dict['time_slots'][:]
    # conditions_dict / events_dict are copied but not used below;
    # presumably kept for parity with other variants of this routine.
    conditions_dict = data_dict['Conditions_dict'].copy()
    events_dict = data_dict['Events_dict'].copy()
    sensor_list = data_dict['sensor_list'][:]
    weather_list = data_dict['weather_list'][:]
    weather_list_used = ['TemperatureC', 'Dew PointC', 'Humidity', 'Events', 'Conditions']

    # data_used is the list of refernece name for all measurements from now on.
    data_used = sensor_list + weather_list_used
    # This is a global ID for data_used measurement
    data_used_idx = range(len(data_used))
    sensor_idx = range(len(sensor_list))
    weather_idx = range(len(sensor_list), len(data_used))
    dsout = {'data_dict': data_dict}

    if proc_avg:
        log.info('-' * 40)
        log.info('processing avg.feature..')
        log.info('-' * 40)

        # Interpolate gaps of up to 5% of the total number of time slots.
        X_Feature, X_Time, X_names, X_zero_var_list, X_zero_var_val, X_int_type_list,\
        X_int_type_idx, X_float_type_list, X_float_type_idx, X_weather_type_idx, X_sensor_type_idx = \
            build_feature_matrix(data_dict, sensor_list, weather_list_used, time_slots, interpolation=1, max_num_succ_idx_for_itpl=int(len(time_slots)*0.05))

        build_feature_matrix_out = \
            {'X_Feature': X_Feature,
             'X_Time': X_Time,
             'X_names': X_names,
             'X_zero_var_list': X_zero_var_list,
             'X_zero_var_val': X_zero_var_val,
             'X_int_type_list': X_int_type_list,
             'X_int_type_idx': X_int_type_idx,
             'X_float_type_list': X_float_type_list,
             'X_float_type_idx': X_float_type_idx,
             'X_weather_type_idx': X_weather_type_idx,
             'X_sensor_type_idx': X_sensor_type_idx}

        # Wrap the dict in an attribute-access object for downstream use.
        build_feature_matrix_out = obj(build_feature_matrix_out)

        if len(X_names+X_zero_var_list) != len(data_used):
            log.error('Missing name is found in X_names or X_zero_var_list')
            raise NameError('Missing name is found in X_names or X_zero_var_list')

        else:
            zero_var_idx = [data_used.index(name_str) for name_str in X_zero_var_list]
            nzero_var_idx = list(set(data_used_idx)-set(zero_var_idx))
        
        if X_Feature.shape[0] > 0:
            # From below all index are reference to X_Feature
            sf_idx = list(set(X_sensor_type_idx)&set(X_float_type_idx))
            # Equivalent to np.array(data_used)[np.array(nzero_var_idx)[sf_idx]]
            sf_name = list(np.array(X_names)[sf_idx])
            si_idx = list(set(X_sensor_type_idx)&set(X_int_type_idx))
            si_name = list(np.array(X_names)[si_idx])
            wf_idx = list(set(X_weather_type_idx)&set(X_float_type_idx))
            wf_name = list(np.array(X_names)[wf_idx])
            wi_idx = list(set(X_weather_type_idx)&set(X_int_type_idx))
            wi_name = list(np.array(X_names)[wi_idx])

            #Euclidian Distance Matrix of Floating type of data only   wf+o
            float_idx = list(set(sf_idx)| set(wf_idx))
            int_idx = list(set(si_idx)| set(wi_idx))

            # Float Type Measurement Clustering
            X_Feature_sfe, sf_exemplars_dict, exemplars_, labels_ = \
                cluster_measurement_points(X_Feature[:, sf_idx], sf_name, corr_bnd=[0.1, 0.9], alg='aff')

            # Map the exemplar positions back to X_Feature column indices.
            sfe_idx = list(np.array(sf_idx)[exemplars_])
            #plot_label(X_Feature,X_names,labels_,exemplars_,[4,5,6,7])

            # InT Type Measurement Clustering
            X_Feature_sie, si_exemplars_dict, exemplars_, labels_ = \
                cluster_measurement_points(X_Feature[:, si_idx], si_name, corr_bnd=[0.0, 0.9], alg='aff')
            sie_idx = list(np.array(si_idx)[exemplars_])

            # sensor -float type
            sfe_state, sfe_corr_val = x_input_to_states(X_Feature_sfe, CORR_VAL_OUT=1)

            # sensor -integer type: int columns are already discrete states.
            sie_state = X_Feature_sie

            # weather -float type
            wf_state, wf_corr_val = x_input_to_states(X_Feature[:, wf_idx], CORR_VAL_OUT=1)

            # weather -integer type
            wi_state = X_Feature[:, wi_idx]

            # Placeholder so np.append below works when a group is empty.
            empty_states = np.array([[] for i in range(len(X_Time))])
            if len(sfe_state) == 0:
                sfe_state = empty_states

            if len(sie_state) == 0:
                sie_state = empty_states

            if len(wf_state) ==0:
                wf_state = empty_states

            if len(wi_state) == 0:
                wi_state = empty_states

            # Exemplar sensor only    
            X_Sensor_STATE = np.append(sfe_state,sie_state, axis=1)
            X_Sensor_STATE = X_Sensor_STATE.astype(int)
            X_Sensor_NAMES = list(np.array(X_names)[sfe_idx]) + list(np.array(X_names)[sie_idx])

            X_Weather_STATE = np.append(wf_state,wi_state, axis=1)
            X_Weather_STATE = X_Weather_STATE.astype(int)
            X_Weather_NAMES = list(np.array(X_names)[wf_idx])+list(np.array(X_names)[wi_idx])

            # months of a year,days of a week, and hours of a day
            # (Monday, Tuesday,Wendsday,Thursday,Saturday,Sunday) =(0,1,2,3,4,5,6)
            X_Time_STATE_temp = build_time_states(X_Time)
            X_Time_NAMES_temp = ['MTH', 'WD', 'HR']
            X_Time_STATE = list()
            X_Time_NAMES = list()

            # Keep only time dimensions that actually vary in this window.
            for xt_col, xt_name in zip(X_Time_STATE_temp.T,X_Time_NAMES_temp):
                if len(set(xt_col)) > 1:
                    X_Time_STATE.append(xt_col)
                    X_Time_NAMES.append(xt_name)
            
            X_Time_STATE = np.array(X_Time_STATE).T

            #################################################
            # FORMATTED DATA  FOR REGUALR EVENT
            #################################################
            #DO_PROB_EST=1  ** Save this variables***
            #avgdata_mat = np.hstack([X_Sensor_STATE,X_Weather_STATE,X_Time_STATE])
            #avgdata_names = X_Sensor_NAMES+X_Weather_NAMES+X_Time_NAMES
            # Python-2 dict merge (list concatenation of .items()).
            avgdata_exemplar = dict(sf_exemplars_dict.items()+si_exemplars_dict.items())
            avgdata_zvar = X_zero_var_list
            
            avgdata_dict = dict()
            avgdata_dict.update({'build_feature_matrix_out': build_feature_matrix_out})

            avgdata_dict.update({'avgdata_state_mat': X_Sensor_STATE})
            avgdata_dict.update({'avgdata_weather_mat': X_Weather_STATE})
            avgdata_dict.update({'avgdata_time_mat': X_Time_STATE})

            avgdata_dict.update({'avg_time_slot': X_Time})
            avgdata_dict.update({'avgdata_exemplar': avgdata_exemplar})
            avgdata_dict.update({'avgdata_zvar': avgdata_zvar})

            avgdata_dict.update({'sensor_names': X_Sensor_NAMES})
            avgdata_dict.update({'weather_names': X_Weather_NAMES})
            avgdata_dict.update({'time_names': X_Time_NAMES})
            dsout.update({'avgdata_dict': avgdata_dict})

    if proc_diff:
        log.info('-' * 40)
        log.info('processing diff.feature..')
        log.info('-' * 40)
        ####################################
        # Irregular Event Extraction
        ####################################
        # Interpolatoin with outlier removal, Here we exclude weather data from irregualr event analysis
        # since weather data noramlly show slow changes in time.so we dont expect in any meaningful diffs values
        measurement_point_set, num_type_set = interpolation_measurement(data_dict, sensor_list, err_rate=1, sgm_bnd=20)

        # Irregualr matrix
        Xdiff_Mat,\
        Xdiff_Time,\
        Xdiff_Names,\
        Xdiff_zero_var_list,\
        Xdiff_zero_var_val,\
        Xdiff_int_type_list,\
        Xdiff_int_type_idx,\
        Xdiff_float_type_list,\
        Xdiff_float_type_idx =\
            build_diff_matrix(measurement_point_set, time_slots, num_type_set, sensor_list, PARALLEL=PARALLEL)

        build_diff_matrix_out = \
            {'Xdiff_Mat':Xdiff_Mat,
             'Xdiff_Time':Xdiff_Time,
             'Xdiff_Names':Xdiff_Names,
             'Xdiff_zero_var_list':Xdiff_zero_var_list,
             'Xdiff_zero_var_val':Xdiff_zero_var_val,
             'Xdiff_int_type_list':Xdiff_int_type_list,
             'Xdiff_int_type_idx':Xdiff_int_type_idx,
             'Xdiff_float_type_list':Xdiff_float_type_list,
             'Xdiff_float_type_idx':Xdiff_float_type_idx}

        build_diff_matrix_out = obj(build_diff_matrix_out)

        if Xdiff_Mat.shape[0] > 0:
            #==============================================================================
            # Restructure diff_marix's and weather matix  for the same common time slot
            #==============================================================================
            # NOTE(review): X_Time comes from the proc_avg branch; see
            # the docstring for the cross-branch dependency.
            time_slots_array = np.sort(np.array(list(set(Xdiff_Time) & set(X_Time))))

            # Extract subset of X_Weather_STATE
            removed_idx_list = list()
            for ridx, slot in enumerate(X_Time):
                slot_idx = np.where(time_slots_array==slot)[0]

                # slot not in common time slots
                if len(slot_idx) == 0:
                    removed_idx_list.append(ridx)

            XDIFF_Weather_STATE = np.delete(X_Weather_STATE, removed_idx_list,axis=0)

            # Extract subset of Xdiff_Mat
            removed_idx_list = list()
            for ridx,slot in enumerate(Xdiff_Time):
                slot_idx = np.where(time_slots_array == slot)[0]

                # slot not in common time slots
                if len(slot_idx) == 0:
                    removed_idx_list.append(ridx)

            Xdiff_Mat = np.delete(Xdiff_Mat, removed_idx_list, axis=0)

            # Update Xdiff_Time
            Xdiff_Time = time_slots_array
            XDIFF_Weather_STATE = np.array(XDIFF_Weather_STATE)    

            # From below all index are reference to X_Feature
            xdiff_sf_idx = Xdiff_float_type_idx
            xdiff_sf_name = Xdiff_float_type_list
            xdiff_si_idx = Xdiff_int_type_idx
            xdiff_si_name = Xdiff_int_type_list

            # Float Type Measurement Clustering
            X_Diff_sfe, sf_diff_exemplars_dict, exemplars_, labels_ = \
                cluster_measurement_points(Xdiff_Mat[:, xdiff_sf_idx], xdiff_sf_name, corr_bnd=[0.1, 0.9])
            xdiff_sfe_idx = list(np.array(xdiff_sf_idx)[exemplars_])

            # InT Type Measurement Clustering
            X_Diff_sie, si_diff_exemplars_dict, exemplars_, labels_ = \
                cluster_measurement_points(Xdiff_Mat[:, xdiff_si_idx], xdiff_si_name, corr_bnd=[0.1, 0.9])
            xdiff_sie_idx = list(np.array(xdiff_si_idx)[exemplars_])

            # sensor -float type
            xdiff_sfe_state, xdiff_sfe_corr_val =\
                x_input_to_states(X_Diff_sfe, CORR_VAL_OUT=1, PARALLEL=PARALLEL)

            # sensor -integer type
            xdiff_sie_state = X_Diff_sie
            empty_states = np.array([[] for i in range(len(Xdiff_Time))])

            if len(xdiff_sfe_state) == 0:
                xdiff_sfe_state = empty_states

            if len(xdiff_sie_state) == 0:
                xdiff_sie_state = empty_states

            # NOTE(review): wf_state / wi_state also originate in the
            # proc_avg branch and were already replaced there if empty.
            if len(wf_state) == 0:
                wf_state = empty_states

            if len(wi_state) == 0:
                wi_state = empty_states

            # Exemplar sensor only    
            XDIFF_Sensor_STATE = np.append(xdiff_sfe_state,xdiff_sie_state, axis=1)
            XDIFF_Sensor_STATE = XDIFF_Sensor_STATE.astype(int)
            XDIFF_Sensor_NAMES = list(np.array(Xdiff_Names)[xdiff_sfe_idx])+list(np.array(Xdiff_Names)[xdiff_sie_idx])

            # months of a year,days of a week, and hours of a day
            # (Monday, Tuesday,Wendsday,Thursday,Saturday,Sunday) =(0,1,2,3,4,5,6)
            XDIFF_Time_STATE_temp = build_time_states(Xdiff_Time)
            XDIFF_Time_NAMES_temp = ['MTH', 'WD', 'HR']
            XDIFF_Time_STATE = list()
            XDIFF_Time_NAMES = list()
            # Keep only time dimensions that actually vary in this window.
            for xt_col, xt_name in zip(XDIFF_Time_STATE_temp.T, XDIFF_Time_NAMES_temp):
                if len(set(xt_col)) > 1:
                    XDIFF_Time_STATE.append(xt_col)
                    XDIFF_Time_NAMES.append(xt_name)
            XDIFF_Time_STATE = np.array(XDIFF_Time_STATE).T

            #################################################
            # FORMATTED DATA  FOR IRREGUALR EVENT
            #################################################
            log.info("FORMATTED DATA  FOR IRREGUALR EVENT")
            #** Save this variables***
            #diffdata_mat = np.hstack([XDIFF_Sensor_STATE,X_Weather_STATE,XDIFF_Time_STATE])
            #diffdata_names = XDIFF_Sensor_NAMES+X_Weather_NAMES+XDIFF_Time_NAMES
            # Python-2 dict merge (list concatenation of .items()).
            diffdata_exemplar = dict(sf_diff_exemplars_dict.items() + si_diff_exemplars_dict.items())
            diffdata_zvar = Xdiff_zero_var_list

            diffdata_dict = dict()
            diffdata_dict.update({'build_diff_matrix_out': build_diff_matrix_out})

            diffdata_dict.update({'diffdata_state_mat': XDIFF_Sensor_STATE})
            diffdata_dict.update({'diffdata_weather_mat': XDIFF_Weather_STATE})
            diffdata_dict.update({'diffdata_time_mat': XDIFF_Time_STATE})

            diffdata_dict.update({'diff_time_slot': Xdiff_Time})
            diffdata_dict.update({'diffdata_exemplar': diffdata_exemplar})
            diffdata_dict.update({'diffdata_zvar': diffdata_zvar})

            diffdata_dict.update({'sensor_names': XDIFF_Sensor_NAMES})
            diffdata_dict.update({'weather_names': X_Weather_NAMES})
            diffdata_dict.update({'time_names': X_Time_NAMES})

            dsout.update({'diffdata_dict': diffdata_dict})

    dsout.update({'bldg_key': remove_dot(bldg_key)})

    return dsout
Пример #37
0
    def run(self):
        """
        Worker loop: pull JSON commands from ``self.cmd_q`` and run one
        full DDEA pass per command.

        Each command carries 'selected-nodes' (sensor identifiers),
        'start-date' / 'end-date' ('%Y-%m-%d') and 'time-interval'
        (seconds). While a command is being processed a
        ``META_DIR/wip.json`` marker exists; it is removed and cmd_lock is
        cleared on both success and failure.

        NOTE(review): uses Python-2 ``print e`` syntax -- this module
        targets Python 2.
        NOTE(review): the ``os.remove`` calls in the error paths assume
        wip.json was created; if the failure happened before the marker
        was written, os.remove itself raises -- confirm acceptable.
        """
        from log_util import log

        try:
            while True:
                cmd = None
                try:
                    # Short timeout so the loop stays responsive; timeouts
                    # simply retry.
                    cmd = self.cmd_q.get(block=True, timeout=0.1)
                except Exception as e:
                    continue

                finally:
                    if cmd:
                        self.cmd_q.task_done()

                        try:

                            # Mark work-in-progress for external observers.
                            with open(META_DIR + "wip.json", 'w') as f:
                                f.write(simplejson.dumps({"wip": 1}))

                            cmdset = simplejson.loads(cmd)
                            sensor_hash = cmdset['selected-nodes']
                            s_date = datetime.strptime(cmdset['start-date'], '%Y-%m-%d')
                            e_date = datetime.strptime(cmdset['end-date'], '%Y-%m-%d')

                            if not len(sensor_hash):
                                log.critical("No sensor is selected!")
                            else:

                                log.info('****************************** Begining of DDEA *******************************')

                                # Hard-coded run parameters for this deployment.
                                bldg_key = 'SODA'
                                #exemplar by user
                                #pname_key = '_POWER_'
                                pname_key = 'POWER'

                                s_epoch = int(time.mktime(s_date.timetuple()))
                                e_epoch = int(time.mktime(e_date.timetuple()))
                                time_inv = dt.timedelta(seconds=cmdset['time-interval'])

                                log.info("Cleaning up old output...")

                                # Clear previous run's artifacts before processing.
                                mt.remove_all_files(FIG_DIR)
                                mt.remove_all_files(JSON_DIR)
                                mt.remove_all_files(PROC_OUT_DIR)

                                log.info("start epoch : " + str(s_epoch) + " end epoch : " + str(e_epoch))
                                log.info(str(time_inv) + ' time slot interval is set for this data set !!!')
                                log.info("BLDG_KEY : " + bldg_key + " PNAME_KEY : " + pname_key)
                                log.info('*' * 80)

                                log.info("Retrieve sensor data from quasar TSDB")

                                sensor_names_hash = mt.sensor_name_uid_dict(bldg_key, sensor_hash)

                                sensor_data = read_sensor_data(sensor_names_hash, s_epoch, e_epoch)

                                if sensor_data and len(sensor_data):
                                    ddea_process(sensor_names_hash, sensor_data, s_epoch, e_epoch, time_inv, bldg_key, pname_key)
                                else:
                                    log.critical("No sensor data available for time period and sensor selected!")

                                log.info('******************************** End of DDEA **********************************')

                            # Success path: clear the marker and the lock.
                            os.remove(META_DIR + "wip.json")
                            cmd_lock.clear()

                            log.info("execution-lock cleared")
                            log.info('~' * 80)

                        except Exception as e:
                            # Per-command failure: clean up and keep looping.
                            os.remove(META_DIR + "wip.json")
                            cmd_lock.clear()
                            print e
                            log.error(str(e))

        except Exception as e:
            # Loop-level failure: clean up before the thread exits.
            os.remove(META_DIR + "wip.json")
            cmd_lock.clear()
            print e
            log.error(str(e))

        finally:
            sys.exit(0)
Пример #38
0
def data_summerization(bldg_key, data_dict, proc_avg=True, proc_diff=True, PARALLEL=False):
    """Summarize pre-processed building data into discrete state matrices.

    Builds up to two feature sets for downstream probability/BN analysis:
      * 'avgdata_dict'  -- averaged features for regular events (proc_avg)
      * 'diffdata_dict' -- differential features for irregular events (proc_diff)

    Parameters:
        bldg_key:  building identifier; dots are stripped before storing.
        data_dict: dict with 'time_slots', 'Conditions_dict', 'Events_dict',
                   'sensor_list', 'weather_list' plus per-measurement samples.
        proc_avg:  build the averaged (regular-event) feature set.
        proc_diff: build the differential (irregular-event) feature set.
        PARALLEL:  enable multiprocessing inside the diff pipeline.

    Returns:
        dict containing 'data_dict', 'bldg_key' and, when produced,
        'avgdata_dict' and 'diffdata_dict'.

    NOTE(review): the proc_diff branch reads X_Time, X_Weather_STATE,
    wf_state, wi_state, X_Weather_NAMES and X_Time_NAMES, which are bound
    only when proc_avg is True and the avg feature matrix is non-empty.
    Calling with proc_diff=True but proc_avg=False would raise NameError --
    presumably callers always enable both; confirm.
    """
    # Shallow copies so later mutation of the inputs does not leak in.
    time_slots = data_dict['time_slots'][:]
    conditions_dict = data_dict['Conditions_dict'].copy()
    events_dict = data_dict['Events_dict'].copy()
    sensor_list = data_dict['sensor_list'][:]
    weather_list = data_dict['weather_list'][:]
    weather_list_used = ['TemperatureC', 'Dew PointC', 'Humidity', 'Events', 'Conditions']

    # data_used is the list of refernece name for all measurements from now on.
    data_used = sensor_list + weather_list_used
    # This is a global ID for data_used measurement
    data_used_idx = range(len(data_used))
    sensor_idx = range(len(sensor_list))
    weather_idx = range(len(sensor_list), len(data_used))
    dsout = {'data_dict': data_dict}

    if proc_avg:
        log.info('-' * 40)
        log.info('processing avg.feature..')
        log.info('-' * 40)

        # Interpolate gaps of at most 5% of the total slots.
        X_Feature, X_Time, X_names, X_zero_var_list, X_zero_var_val, X_int_type_list,\
        X_int_type_idx, X_float_type_list, X_float_type_idx, X_weather_type_idx, X_sensor_type_idx = \
            build_feature_matrix(data_dict, sensor_list, weather_list_used, time_slots, interpolation=1, max_num_succ_idx_for_itpl=int(len(time_slots)*0.05))

        build_feature_matrix_out = \
            {'X_Feature': X_Feature,
             'X_Time': X_Time,
             'X_names': X_names,
             'X_zero_var_list': X_zero_var_list,
             'X_zero_var_val': X_zero_var_val,
             'X_int_type_list': X_int_type_list,
             'X_int_type_idx': X_int_type_idx,
             'X_float_type_list': X_float_type_list,
             'X_float_type_idx': X_float_type_idx,
             'X_weather_type_idx': X_weather_type_idx,
             'X_sensor_type_idx': X_sensor_type_idx}

        # Wrap as attribute-style object for convenient downstream access.
        build_feature_matrix_out = obj(build_feature_matrix_out)

        # Every measurement must end up either kept (X_names) or dropped as
        # zero-variance (X_zero_var_list); anything else is a pipeline error.
        if len(X_names+X_zero_var_list) != len(data_used):
            log.error('Missing name is found in X_names or X_zero_var_list')
            raise NameError('Missing name is found in X_names or X_zero_var_list')

        else:
            # NOTE(review): zero_var_idx / nzero_var_idx appear unused below;
            # possibly kept for debugging -- confirm before removing.
            zero_var_idx = [data_used.index(name_str) for name_str in X_zero_var_list]
            nzero_var_idx = list(set(data_used_idx)-set(zero_var_idx))

        if X_Feature.shape[0] > 0:
            # From below all index are reference to X_Feature
            # Partition columns into sensor/weather x float/int groups.
            sf_idx = list(set(X_sensor_type_idx)&set(X_float_type_idx))
            # Equivalent to np.array(data_used)[np.array(nzero_var_idx)[sf_idx]]
            sf_name = list(np.array(X_names)[sf_idx])
            si_idx = list(set(X_sensor_type_idx)&set(X_int_type_idx))
            si_name = list(np.array(X_names)[si_idx])
            wf_idx = list(set(X_weather_type_idx)&set(X_float_type_idx))
            wf_name = list(np.array(X_names)[wf_idx])
            wi_idx = list(set(X_weather_type_idx)&set(X_int_type_idx))
            wi_name = list(np.array(X_names)[wi_idx])

            #Euclidian Distance Matrix of Floating type of data only   wf+o
            float_idx = list(set(sf_idx)| set(wf_idx))
            int_idx = list(set(si_idx)| set(wi_idx))

            # Float Type Measurement Clustering
            # (affinity propagation; exemplars_ index into the sf subset)
            X_Feature_sfe, sf_exemplars_dict, exemplars_, labels_ = \
                cluster_measurement_points(X_Feature[:, sf_idx], sf_name, corr_bnd=[0.1, 0.9], alg='aff')

            # Map exemplar positions back to X_Feature column indices.
            sfe_idx = list(np.array(sf_idx)[exemplars_])
            #plot_label(X_Feature,X_names,labels_,exemplars_,[4,5,6,7])

            # InT Type Measurement Clustering
            X_Feature_sie, si_exemplars_dict, exemplars_, labels_ = \
                cluster_measurement_points(X_Feature[:, si_idx], si_name, corr_bnd=[0.0, 0.9], alg='aff')
            sie_idx = list(np.array(si_idx)[exemplars_])

            # sensor -float type
            sfe_state, sfe_corr_val = x_input_to_states(X_Feature_sfe, CORR_VAL_OUT=1)

            # sensor -integer type
            # (integer measurements are already discrete -- used as-is)
            sie_state = X_Feature_sie

            # weather -float type
            wf_state, wf_corr_val = x_input_to_states(X_Feature[:, wf_idx], CORR_VAL_OUT=1)

            # weather -integer type
            wi_state = X_Feature[:, wi_idx]

            # Placeholder with 0 columns so np.append below stays valid
            # when a group produced no states.
            empty_states = np.array([[] for i in range(len(X_Time))])
            if len(sfe_state) == 0:
                sfe_state = empty_states

            if len(sie_state) == 0:
                sie_state = empty_states

            if len(wf_state) ==0:
                wf_state = empty_states

            if len(wi_state) == 0:
                wi_state = empty_states

            # Exemplar sensor only    
            X_Sensor_STATE = np.append(sfe_state,sie_state, axis=1)
            X_Sensor_STATE = X_Sensor_STATE.astype(int)
            X_Sensor_NAMES = list(np.array(X_names)[sfe_idx]) + list(np.array(X_names)[sie_idx])

            X_Weather_STATE = np.append(wf_state,wi_state, axis=1)
            X_Weather_STATE = X_Weather_STATE.astype(int)
            X_Weather_NAMES = list(np.array(X_names)[wf_idx])+list(np.array(X_names)[wi_idx])

            # months of a year,days of a week, and hours of a day
            # (Monday, Tuesday,Wendsday,Thursday,Saturday,Sunday) =(0,1,2,3,4,5,6)
            X_Time_STATE_temp = build_time_states(X_Time)
            X_Time_NAMES_temp = ['MTH', 'WD', 'HR']
            X_Time_STATE = list()
            X_Time_NAMES = list()

            # Keep only time columns that actually vary over the period.
            for xt_col, xt_name in zip(X_Time_STATE_temp.T,X_Time_NAMES_temp):
                if len(set(xt_col)) > 1:
                    X_Time_STATE.append(xt_col)
                    X_Time_NAMES.append(xt_name)

            X_Time_STATE = np.array(X_Time_STATE).T

            #################################################
            # FORMATTED DATA  FOR REGUALR EVENT
            #################################################
            #DO_PROB_EST=1  ** Save this variables***
            #avgdata_mat = np.hstack([X_Sensor_STATE,X_Weather_STATE,X_Time_STATE])
            #avgdata_names = X_Sensor_NAMES+X_Weather_NAMES+X_Time_NAMES
            # NOTE(review): dict(a.items()+b.items()) is a Python 2 idiom;
            # under Python 3 dict views do not support '+'.
            avgdata_exemplar = dict(sf_exemplars_dict.items()+si_exemplars_dict.items())
            avgdata_zvar = X_zero_var_list

            avgdata_dict = dict()
            avgdata_dict.update({'build_feature_matrix_out': build_feature_matrix_out})

            avgdata_dict.update({'avgdata_state_mat': X_Sensor_STATE})
            avgdata_dict.update({'avgdata_weather_mat': X_Weather_STATE})
            avgdata_dict.update({'avgdata_time_mat': X_Time_STATE})

            avgdata_dict.update({'avg_time_slot': X_Time})
            avgdata_dict.update({'avgdata_exemplar': avgdata_exemplar})
            avgdata_dict.update({'avgdata_zvar': avgdata_zvar})

            avgdata_dict.update({'sensor_names': X_Sensor_NAMES})
            avgdata_dict.update({'weather_names': X_Weather_NAMES})
            avgdata_dict.update({'time_names': X_Time_NAMES})
            dsout.update({'avgdata_dict': avgdata_dict})

    if proc_diff:
        log.info('-' * 40)
        log.info('processing diff.feature..')
        log.info('-' * 40)
        ####################################
        # Irregular Event Extraction
        ####################################
        # Interpolatoin with outlier removal, Here we exclude weather data from irregualr event analysis
        # since weather data noramlly show slow changes in time.so we dont expect in any meaningful diffs values
        measurement_point_set,num_type_set = interpolation_measurement(data_dict, sensor_list, err_rate=1, sgm_bnd=20)

        # Irregualr matrix
        Xdiff_Mat,\
        Xdiff_Time,\
        Xdiff_Names,\
        Xdiff_zero_var_list,\
        Xdiff_zero_var_val,\
        Xdiff_int_type_list,\
        Xdiff_int_type_idx,\
        Xdiff_float_type_list,\
        Xdiff_float_type_idx =\
            build_diff_matrix(measurement_point_set, time_slots, num_type_set, sensor_list, PARALLEL=PARALLEL)

        build_diff_matrix_out = \
            {'Xdiff_Mat':Xdiff_Mat,
             'Xdiff_Time':Xdiff_Time,
             'Xdiff_Names':Xdiff_Names,
             'Xdiff_zero_var_list':Xdiff_zero_var_list,
             'Xdiff_zero_var_val':Xdiff_zero_var_val,
             'Xdiff_int_type_list':Xdiff_int_type_list,
             'Xdiff_int_type_idx':Xdiff_int_type_idx,
             'Xdiff_float_type_list':Xdiff_float_type_list,
             'Xdiff_float_type_idx':Xdiff_float_type_idx}

        build_diff_matrix_out = obj(build_diff_matrix_out)

        if Xdiff_Mat.shape[0] > 0:
            #==============================================================================
            # Restructure diff_marix's and weather matix  for the same common time slot
            #==============================================================================
            # Common slots between diff time base and avg time base (X_Time
            # comes from the proc_avg branch above -- see function NOTE).
            time_slots_array = np.sort(np.array(list(set(Xdiff_Time) & set(X_Time))))

            # Extract subset of X_Weather_STATE
            removed_idx_list = list()
            for ridx, slot in enumerate(X_Time):
                slot_idx = np.where(time_slots_array==slot)[0]

                # slot not in common time slots
                if len(slot_idx) == 0:
                    removed_idx_list.append(ridx)

            XDIFF_Weather_STATE = np.delete(X_Weather_STATE, removed_idx_list,axis=0)

            # Extract subset of Xdiff_Mat
            removed_idx_list = list()
            for ridx,slot in enumerate(Xdiff_Time):
                slot_idx = np.where(time_slots_array == slot)[0]

                # slot not in common time slots
                if len(slot_idx) == 0:
                    removed_idx_list.append(ridx)

            Xdiff_Mat = np.delete(Xdiff_Mat, removed_idx_list, axis=0)

            # Update Xdiff_Time
            Xdiff_Time = time_slots_array
            XDIFF_Weather_STATE = np.array(XDIFF_Weather_STATE)

            # From below all index are reference to X_Feature
            xdiff_sf_idx = Xdiff_float_type_idx
            xdiff_sf_name = Xdiff_float_type_list
            xdiff_si_idx = Xdiff_int_type_idx
            xdiff_si_name = Xdiff_int_type_list

            # Float Type Measurement Clustering
            X_Diff_sfe, sf_diff_exemplars_dict, exemplars_, labels_ = \
                cluster_measurement_points(Xdiff_Mat[:, xdiff_sf_idx], xdiff_sf_name, corr_bnd=[0.1, 0.9])
            xdiff_sfe_idx = list(np.array(xdiff_sf_idx)[exemplars_])

            # InT Type Measurement Clustering
            X_Diff_sie, si_diff_exemplars_dict, exemplars_, labels_ = \
                cluster_measurement_points(Xdiff_Mat[:, xdiff_si_idx], xdiff_si_name, corr_bnd=[0.1, 0.9])
            xdiff_sie_idx = list(np.array(xdiff_si_idx)[exemplars_])

            # sensor -float type
            xdiff_sfe_state, xdiff_sfe_corr_val =\
                x_input_to_states(X_Diff_sfe, CORR_VAL_OUT=1, PARALLEL=PARALLEL)

            # sensor -integer type
            xdiff_sie_state = X_Diff_sie
            # 0-column placeholder for empty groups (same trick as avg branch).
            empty_states = np.array([[] for i in range(len(Xdiff_Time))])

            if len(xdiff_sfe_state) == 0:
                xdiff_sfe_state = empty_states

            if len(xdiff_sie_state) == 0:
                xdiff_sie_state = empty_states

            # NOTE(review): wf_state / wi_state below come from the proc_avg
            # branch and are re-checked against a diff-sized empty_states;
            # their row counts may differ from Xdiff_Time -- confirm intent.
            if len(wf_state) == 0:
                wf_state = empty_states

            if len(wi_state) == 0:
                wi_state = empty_states

            # Exemplar sensor only    
            XDIFF_Sensor_STATE = np.append(xdiff_sfe_state,xdiff_sie_state, axis=1)
            XDIFF_Sensor_STATE = XDIFF_Sensor_STATE.astype(int)
            XDIFF_Sensor_NAMES = list(np.array(Xdiff_Names)[xdiff_sfe_idx])+list(np.array(Xdiff_Names)[xdiff_sie_idx])

            # months of a year,days of a week, and hours of a day
            # (Monday, Tuesday,Wendsday,Thursday,Saturday,Sunday) =(0,1,2,3,4,5,6)
            XDIFF_Time_STATE_temp = build_time_states(Xdiff_Time)
            XDIFF_Time_NAMES_temp = ['MTH', 'WD', 'HR']
            XDIFF_Time_STATE = list()
            XDIFF_Time_NAMES = list()
            # Keep only time columns that actually vary over the period.
            for xt_col, xt_name in zip(XDIFF_Time_STATE_temp.T, XDIFF_Time_NAMES_temp):
                if len(set(xt_col)) > 1:
                    XDIFF_Time_STATE.append(xt_col)
                    XDIFF_Time_NAMES.append(xt_name)
            XDIFF_Time_STATE = np.array(XDIFF_Time_STATE).T

            #################################################
            # FORMATTED DATA  FOR IRREGUALR EVENT
            #################################################
            #** Save this variables***
            #diffdata_mat = np.hstack([XDIFF_Sensor_STATE,X_Weather_STATE,XDIFF_Time_STATE])
            #diffdata_names = XDIFF_Sensor_NAMES+X_Weather_NAMES+XDIFF_Time_NAMES
            # NOTE(review): Python 2 dict-merge idiom, as in the avg branch.
            diffdata_exemplar = dict(sf_diff_exemplars_dict.items() + si_diff_exemplars_dict.items())
            diffdata_zvar = Xdiff_zero_var_list

            diffdata_dict = dict()
            diffdata_dict.update({'build_diff_matrix_out': build_diff_matrix_out})

            diffdata_dict.update({'diffdata_state_mat': XDIFF_Sensor_STATE})
            diffdata_dict.update({'diffdata_weather_mat': XDIFF_Weather_STATE})
            diffdata_dict.update({'diffdata_time_mat': XDIFF_Time_STATE})

            diffdata_dict.update({'diff_time_slot': Xdiff_Time})
            diffdata_dict.update({'diffdata_exemplar': diffdata_exemplar})
            diffdata_dict.update({'diffdata_zvar': diffdata_zvar})

            diffdata_dict.update({'sensor_names': XDIFF_Sensor_NAMES})
            diffdata_dict.update({'weather_names': X_Weather_NAMES})
            diffdata_dict.update({'time_names': X_Time_NAMES})

            dsout.update({'diffdata_dict': diffdata_dict})

    dsout.update({'bldg_key': remove_dot(bldg_key)})

    return dsout
Пример #39
0
def build_diff_matrix(measurement_point_set, time_slots, num_type_set, irr_data_name, conf_lev=0.5, PARALLEL=False):
    """Build the differential (irregular-event) feature matrix.

    For each measurement point, compute its diff signal over *time_slots*
    (via get_diff), normalize float-typed signals, and keep int-typed signals
    only when they take more than one discrete value.  Measurements whose
    diff carries no information are collected into a zero-variance list.
    Finally, time slots containing any inf entry (no sample) are removed.

    Parameters:
        measurement_point_set: per-measurement sample sets, aligned with
            irr_data_name and num_type_set.
        time_slots: ordered time slots defining matrix rows.
        num_type_set: per-measurement type flags (FLOAT_TYPE / INT_TYPE).
        irr_data_name: per-measurement reference names.
        conf_lev: confidence level forwarded to get_diff.
        PARALLEL: fan the per-measurement work out to a process pool.

    Returns:
        (Xdiff, new_time_slot, input_names, zero_var_list, zero_var_val,
         INT_type_list, INT_type_idx, FLOAT_type_list, FLOAT_type_idx)
        where Xdiff has one row per surviving time slot and one column per
        surviving measurement.
    """
    #time_slots_utc = dtime_to_unix(time_slots)
    Xdiff = list()
    input_names = list()
    INT_type_list = list()
    FLOAT_type_list = list()
    INT_type_idx = list()
    FLOAT_type_idx = list()
    # Measurements whose diff variance is zero, hence carry no information.
    zero_var_list = list()
    zero_var_val = list()
    num_of_samples = len(time_slots)
    #TIMELET_INV_seconds = (time_slots[1]-time_slots[0]).seconds

    log.info('=' * 40)
    if not PARALLEL:
        for k, (set_val, set_name) in enumerate(zip(measurement_point_set, irr_data_name)):
            log.info(str(irr_data_name[k]))
            try:
                num_type = num_type_set[k]
                diff_mean = get_diff(set_val, time_slots, num_type, conf_lev)
                if num_type == FLOAT_TYPE:
                    norm_diff_mean, output_status = normalize_data(diff_mean)
                elif num_type == INT_TYPE:
                    num_discrete_vals = len(set(list(diff_mean)))
                    log.info('num_discrete_vals : ' + str(num_discrete_vals))

                    if num_discrete_vals > 1:
                        output_status = 0
                        norm_diff_mean = diff_mean
                    else:
                        # A single discrete value carries no information.
                        output_status = -1
                        norm_diff_mean = list(set(diff_mean))
                else:
                    # NOTE(review): an unknown num_type falls through with
                    # output_status/norm_diff_mean left over from the previous
                    # iteration (or unbound on the first, which the except
                    # below absorbs) -- assumed unreachable; confirm that
                    # num_type_set only contains FLOAT_TYPE / INT_TYPE.
                    pass
                # Too many inf entries (> 20% of slots): force the exclusion
                # path via the handler below.  (Was a bare `raise` with no
                # active exception, which only "worked" by raising a
                # TypeError/RuntimeError that the except then caught.)
                if len(np.nonzero(norm_diff_mean == np.inf)[0]) > num_of_samples/5:
                    raise ValueError('too many inf entries in normalized diff data')
            except Exception as e:
                log.error(traceback.print_exc())
                log.error('Error in processing data feature, excluded from analysis ' + str(e))
                output_status = -1
                norm_diff_mean = None

            if output_status == -1:
                zero_var_list.append(set_name)
                zero_var_val.append(norm_diff_mean)
                log.warn('too small variance for float type or a single value for int type, added to zero var list')
            else:
                input_names.append(set_name)
                Xdiff.append(norm_diff_mean)

                if num_type == FLOAT_TYPE:
                    FLOAT_type_list.append(set_name)
                    FLOAT_type_idx.append(len(Xdiff)-1)

                elif num_type == INT_TYPE:
                    INT_type_list.append(set_name)
                    INT_type_idx.append(len(Xdiff)-1)

            log.info('-' * 20)
        log.info('-' * 40)

    # PARALLEL ENABLED
    else:
        log.info('Build diff matrix: Parallel enabled...')
        # Construct param list for workers; build_diff performs the same
        # per-measurement processing as the serial branch above.
        param_list = list()
        for k, (set_val, set_name) in enumerate(zip(measurement_point_set, irr_data_name)):
            param_list.append((k, time_slots, conf_lev, set_val, set_name, num_type_set[k]))

        p = mp.Pool(CPU_CORE_NUM)
        ret_dict = dict(p.map(build_diff, param_list))
        p.close()
        p.join()

        # Re-assemble worker results in the original measurement order.
        for k in sorted(ret_dict.keys()):
            output_status, norm_diff_mean = ret_dict[k]

            set_name = irr_data_name[k]
            num_type = num_type_set[k]

            if output_status == -1:
                zero_var_list.append(set_name)
                zero_var_val.append(norm_diff_mean)
                log.warn("too small variance for float type or a single value for int type, added to zero var list")
            else:
                input_names.append(set_name)
                try:
                    Xdiff.append(norm_diff_mean)
                except Exception as e:
                    log.error(traceback.print_exc())
                    log.error(str(e))

                if num_type == FLOAT_TYPE:
                    FLOAT_type_list.append(set_name)
                    FLOAT_type_idx.append(len(Xdiff)-1)

                elif num_type == INT_TYPE:
                    INT_type_list.append(set_name)
                    INT_type_idx.append(len(Xdiff)-1)
            log.info('-' * 20)

    # One row per time slot, one column per surviving measurement.
    Xdiff = np.array(Xdiff).T
    log.info('-' * 20)
    log.info('removing time slots having no sample...')

    # Collect every row index where any column is inf (slot had no sample).
    # (np.infty replaced with the canonical np.inf spelling used above.)
    inf_idx_set = list()
    for col_vec in Xdiff.T:
        inf_idx = np.nonzero(col_vec == np.inf)[0]
        inf_idx_set = np.r_[inf_idx_set, inf_idx]
    inf_col_idx = list(set(list(inf_idx_set)))
    deleted_timeslot_idx = np.array([int(x) for x in inf_col_idx]).astype(int)
    log.info('time slots ' + str(deleted_timeslot_idx) + ' removed...')
    log.info('-' * 20)

    Xdiff = np.delete(Xdiff, deleted_timeslot_idx, axis=0)
    new_time_slot = np.delete(time_slots, deleted_timeslot_idx)

    # Checking whether the matrix still has any ill entry value.
    verify_data_mat(Xdiff)

    log.info('*-' * 20)
    log.info("* deleted_timeslot_idx : " + str(deleted_timeslot_idx))
    log.info('*-' * 20)

    return Xdiff,\
           new_time_slot,\
           input_names,\
           zero_var_list,\
           zero_var_val, \
           INT_type_list,\
           INT_type_idx,\
           FLOAT_type_list,\
           FLOAT_type_idx
Пример #40
0
def state_retrieval(obs, max_num_cluster=6, off_set=0, est_method='kmean', PARALLEL = False):
    """Cluster observations into discrete states and pick the best cluster
    count via a knee-point search on the per-model log-likelihood scores.

    Parameters:
        obs: observation matrix fed to KMeans / GMM.
        max_num_cluster: largest cluster count to try (1..max_num_cluster).
        off_set: unused here -- presumably kept for interface compatibility
            with callers; confirm before removing.
        est_method: 'kmean' or 'gmm'; anything else raises NameError.
        PARALLEL: fit the candidate models in a worker pool.

    Returns:
        (label, opt_num_cluster, best_model, score, score_err_sum)

    NOTE(review): uses dict.iteritems() -- Python 2 only.
    """
    log.info('-' * 40)
    log.info('Retrieving discrete states from data using ' + est_method + ' model...')
    log.info('-' * 40)
    log.info('try '+ str(max_num_cluster) + ' clusters..... ')
    # score[i] holds the fit score of the model with i+1 clusters.
    score = np.zeros(max_num_cluster)
    model_set = list()

    if not PARALLEL:
        for num_cluster in range(max_num_cluster):
            log.info('Try ' + str(num_cluster+1) + ' clusters ')
            log.info('-----------------------------------')
            if est_method == 'kmean':
                kmean = KMeans(n_clusters=num_cluster+1).fit(obs)
                model_set.append(kmean)
                #score[num_cluster]=-1*np.log(-1*np.sum(kmean.score(obs)))
                #score[num_cluster]=kmean.score(obs)
                #score[num_cluster]=kmean.score(obs)-.5*(num_cluster+1)*1*log10(len(obs))
                #log_ll_val=compute_log_ll(kmean.labels_,obs)
                score[num_cluster] = compute_log_ll(kmean.labels_, obs)

            elif est_method == 'gmm':
                gmm = mixture.GMM(n_components=num_cluster+1).fit(obs)
                model_set.append(gmm)
                score[num_cluster] = np.sum(gmm.score(obs))

            else:
                log.error('not supported est_method')
                raise NameError('not supported est_method')
    else:
        # Fit all candidate models concurrently; workers return
        # (index, (model, score)) pairs re-assembled below.
        log.info('Parallel enabled...')
        model_set = [0] * max_num_cluster
        score = [0] * max_num_cluster
        params = [(obs, i+1, est_method) for i in range(max_num_cluster)]

        p = Pool(max_num_cluster)
        models = p.map(pp_cluster_state_retrieval, params)
        p.close()
        p.join()

        model_dict = dict(models)
        for k, v in model_dict.iteritems():
            model_set[k] = v[0]
            score[k] = v[1]



    # Knee-point detection: for each candidate split index i, fit one
    # regression line to score[:i] and one to score[i:]; the split with the
    # smallest combined squared error marks the "knee" of the score curve.
    score_err_sum = np.zeros(max_num_cluster)
    log.info('Finding knee points of log likelihood...')

    for i in range(max_num_cluster):
        a_0 = score[:(i)]
        if len(a_0) > 1:
            slope, intercept, r_value, p_value, std_err = stats.linregress(range(len(a_0)),a_0)
            sqr_sum_err0 = sum(((slope*np.arange(len(a_0)) + intercept)-a_0)**2)
        else:
            # Fewer than two points fit a line exactly -- zero error.
            sqr_sum_err0=0
        a_1 = score[(i):]
        if len(a_1) > 1:
            slope, intercept, r_value, p_value, std_err = stats.linregress(range(len(a_1)),a_1)
            sqr_sum_err1 = sum(((slope*np.arange(len(a_1)) + intercept)-a_1)**2)
        else:
            sqr_sum_err1 = 0
        score_err_sum[i] = sqr_sum_err0 + sqr_sum_err1
    # Optimum number of clusters.
    min_idx = np.argmin(score_err_sum)
    opt_num_cluster = min_idx+1
    log.info('opt_num_cluster: ' + str(opt_num_cluster))

    # KMeans stores labels from fit; GMM needs an explicit predict pass.
    if est_method == 'kmean':
        label = model_set[min_idx].labels_
    elif est_method == 'gmm':
        label = model_set[min_idx].predict(obs)
    else:
        raise NameError('not supported est_method')
    return label, opt_num_cluster, model_set[min_idx], score, score_err_sum
Пример #41
0
def bn_probability_analysis(bldg_obj, sig_tag='avg'):
    """Run Bayesian-network and peak-probability analysis per power sensor.

    For every sensor registered under *sig_tag* ('avg' for regular events,
    'diff' for irregular events), learn BN structures against sensor, time,
    weather, and joint attributes, then estimate low/high power-peak
    probabilities from the joint model.

    Returns a list of result dicts, one per sensor that was analysed
    successfully; sensors that raise during analysis are logged and skipped.
    """
    sensor_tags = [a.sensor_tag for a in bldg_obj.analysis[sig_tag]]

    # Weather symbol tables used for x-axis labelling.  Only the two known
    # tags bind event/cond; any other tag fails inside the per-sensor try.
    if sig_tag in ('avg', 'diff'):
        weather = bldg_obj.sigtags[sig_tag].weather_dict
        event = weather['Event']
        cond = weather['Cond']

    results = list()

    for p_name in sensor_tags:
        try:
            # BN analysis: power vs. each attribute family.
            s_cause_label, s_labels, s_hc, s_cp_mat, s_bndata_mat = \
                _bn_anaylsis(bldg_obj, p_name, attr='sensor', sig_tag=sig_tag, num_picks_bn=5)

            t_cause_label, t_labels, t_hc, t_cp_mat, t_bndata_mat = \
                _bn_anaylsis(bldg_obj, p_name, attr='time', sig_tag=sig_tag, num_picks_bn=10)

            w_cause_label, w_labels, w_hc, w_cp_mat, w_bndata_mat = \
                _bn_anaylsis(bldg_obj, p_name, attr='weather', sig_tag=sig_tag, num_picks_bn=10)

            # Joint BN over sensor + time + weather.
            all_cause_label, all_labels, all_hc, all_cp_mat, all_bndata_mat = \
                _bn_anaylsis_all(bldg_obj, p_name, sig_tag=sig_tag, num_picks_bn=20)

            # Peak probability conditioned on the joint causes.
            low_peak_state, low_peak_prob, high_peak_state, high_peak_prob = \
                _peak_analysis(all_cause_label, p_name, all_labels, all_bndata_mat)

            symbol_xlabel = _get_tick_symbol(low_peak_state, all_cause_label, event, cond)
            symbol_xtick = range(len(low_peak_state))

            # BN-PROB STORE
            results.append({
                'p_name': p_name,
                's_cause_label': s_cause_label,
                's_labels': s_labels,
                's_hc': s_hc,
                's_cp_mat': s_cp_mat,
                's_bndata_mat': s_bndata_mat,
                't_cause_label': t_cause_label,
                't_labels': t_labels,
                't_hc': t_hc,
                't_cp_mat': t_cp_mat,
                't_bndata_mat': t_bndata_mat,
                'w_cause_label': w_cause_label,
                'w_labels': w_labels,
                'w_hc': w_hc,
                'w_cp_mat': w_cp_mat,
                'w_bndata_mat': w_bndata_mat,
                'all_cause_label': all_cause_label,
                'all_labels': all_labels,
                'all_hc': all_hc,
                'all_cp_mat': all_cp_mat,
                'all_bndata_mat': all_bndata_mat,
                'low_peak_state': low_peak_state,
                'low_peak_prob': low_peak_prob,
                'high_peak_state': high_peak_state,
                'high_peak_prob': high_peak_prob,
                'all_cause_symbol_xlabel': symbol_xlabel,
                'all_cause_symbol_xtick': symbol_xtick})

        except Exception as e:
            # Skip this sensor but keep processing the rest.
            log.error('*** Error in processing bn_prob for ' + p_name + '! ****')
            log.error(traceback.print_exc())
            log.error(str(e))

    return results