#period-dim
# assumes pandas as pd, numpy as np and sqlalchemy are imported at the top
try:
    j = pd.DataFrame(finalized)
    j.columns = ['date_time']
    j['period_type'] = 'half-hourly'
    gg = list(range(48))
    # repeat the 48 half-hourly period labels once per day (len(j) is 48*n)
    j['period'] = np.tile(gg, len(j) // 48)
except Exception:
    print("check the date_time range calculation function")
try:
    pr1 = fetch_.date_time_operation(df)
    # key each reading by a combined "date time" timestamp
    pr1['date_time'] = pr1['Date'] + " " + pr1['Time']
    pr1.columns = [
        'area_id', 'device_id', 'date', 'time', 'value', 'date_time'
    ]
    # align readings against the period dimension j to surface gaps
    Gigar = fetch_.data_loss_finder(pr1, client_id, areaId, j)
    Gigar['client_id'] = client_id
    # reorder and rename: 'value' becomes 'traffic_count'
    Gigar_2 = Gigar[[
        'client_id', 'area_id', 'device_id', 'date_time', 'date',
        'period_type', 'period', 'value'
    ]]
    Gigar_2.columns = [
        'client_id', 'area_id', 'device_id', 'date_time', 'date',
        'period_type', 'period', 'traffic_count'
    ]
    Gigar_2['created_date'] = pd.Timestamp.now().replace(microsecond=0)
except Exception:
    print("Error: check the data")
#paper-towel-usage
try:
    # smooth the raw error counts, then derive per-period usage
    Gigar_3 = fetch_.optimize_smoothed_error_count_paper(Gigar_2)
    Gigar_4 = fetch_.paper_towel_usage_calculation_optimize(Gigar_3, R)
    #Gigar_5 = fetch_.Final_clean(Gigar_3)
except Exception:
    print("Error: check the data")

try:
    Gigar_6 = Gigar_4[[
        'client_id', 'area_id', 'device_id', 'date_time', 'date',
        'raw_value', 'smoothed_value', 'usage', 'created_date'
    ]]
    Gigar_6['date_time'] = Gigar_6['date_time'].astype(str)
    Gigar_6['device_id'] = Gigar_6['device_id'].astype(str)
    ghkl = fetch_.data_loss_finder(Gigar_6, client_id, areaId, j)
    #ghkl.reset_index(inplace=True)
    result = fetch_.usage_gap_distribution(ghkl, ab)
    # drop rows whose device_id was stringified to the literal 'nan'
    result_ = result[result.device_id != 'nan']
    # treat smoothed values above 100 as sensor glitches and zero them
    result_['smoothed_value'] = np.where(result_['smoothed_value'] > 100, 0,
                                         result_['smoothed_value'])
    # the first slot of the range has no predecessor, so force usage to 0
    result_['usage'] = np.where(
        result_['date_time'] == '2019-05-01 00:00:00', 0, result_['usage'])
except Exception:
    print("Error: check the usage-gap data")
try:
    database_username = '******'
    database_password = '******'
    database_ip = '****'
    database_name = '****'
    # a standard MySQL URL is assumed here; driver and format are guesses
    database_connection = sqlalchemy.create_engine(
        'mysql+pymysql://{0}:{1}@{2}/{3}'.format(
            database_username, database_password, database_ip,
            database_name))
except Exception:
    print("Error: check the database connection")
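# a minimal sketch of the write this engine is presumably created for; the
# table name here is an assumption, not from the original:
#   result_.to_sql('paper_towel_usage', con=database_connection,
#                  if_exists='append', index=False)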
#people-count
try:
    Gigar_2 = Gigar_2[[
        'client_id', 'area_id', 'device_id', 'date_time', 'date',
        'period_type', 'period', 'traffic_count', 'created_date'
    ]]
    Gigar_3 = fetch_.Final_clean_People_count(Gigar_2)
    Gigar_3['traffic_count'] = Gigar_3['traffic_count'].astype(int)
    Final = Gigar_3[[
        'client_id', 'area_id', 'device_id', 'date_time', 'date',
        'traffic_count', 'created_date'
    ]]
    j['date_time'] = j['date_time'].astype(str)
    # aggregate the per-device counts into one series per half-hour slot
    grpd_traff = pd.DataFrame(
        Final.groupby(['date_time'])['traffic_count'].sum())
    grpd_traff.reset_index(inplace=True)
    # attribute the aggregated series to the first device id in the area
    uniqs = Final.device_id.unique()
    grpd_traff['device_id'] = uniqs[0]
    data_loss_traffic = fetch_.data_loss_finder(grpd_traff, client_id,
                                                areaId, j)
    Gigar_fin = data_loss_traffic[[
        'client_id', 'area_id', 'device_id', 'date_time', 'date',
        'period_type', 'period', 'traffic_count', 'created_date'
    ]]
    print(j.shape, Gigar_fin.shape)
    print(Gigar_fin.isnull().sum())
except Exception:
    print("Error: check the data", areaId)
try:
    # per-area metadata for the data-association summary
    zk = z[z.areaid == areaId]
    # 'clinetname' is kept as-is: it matches the upstream column spelling
    data_ass = zk[['clientid', 'clinetname', 'areaid']]
    data_ass['devicetype'] = 'PeopleCount'
    data_ass['start_date'] = df['Date'].min()
    data_ass['end_date'] = df['Date'].max()
    data_ass['device_count'] = df.deviceName.nunique()
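# data_ass summarises the area for this run: client identity, device type,
# the covered date range, and the count of distinct reporting devices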