Example No. 1
def sanity_check_counts():
    """ Execute the sanity check queries and report the failed cases
    Args:
    No Arguments
    Returns:
    No return Arguments
    """
    subtaskid = createSubTask("sanity check queries", getMaintaskId())
    try:
        conn = dbConnection()
        cur = conn.cursor()
        cur.execute("select query, name, expected_value from birepusr.etl_data_sanity_queries where project_name='planview'")
        rows = cur.fetchall()
        for row in rows:
            cur.execute(row[0])
            x = cur.fetchall()
            if not x:
                actual_value = 0
            else:
                actual_value = x[0][0]

            condition = row[1]
            expected_value = row[2]
            status = 'Success'
            if expected_value != actual_value:
                status = 'Fail'
                message = '''Expected vs actual values do not match for check '{0}': expected {1} rows but found {2}'''.format(condition, expected_value, actual_value)
                send_custom_mail('Service-Desk ETL : DATA SANITY TEST FAILED', message)

            executeQuery('''insert into birepusr.etl_data_sanity_test_results(condition, created_date, expected_value, actual_value, status, task_id,project_name) values ('{0}', now(), {1}, {2}, '{3}', '{4}','planview')'''.format(condition, expected_value, actual_value, status, subtaskid))
        updateSubTask(subtaskid, "SUCCESS")
    except (Exception, psycopg2.Error) as error:
        insertErrorLog(subtaskid, error)
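The INSERT above splices values into the SQL text with str.format. A minimal sketch of the same insert using psycopg2 parameter binding instead; the DSN and the sample values are placeholders, only the table and column names come from the example above:

import psycopg2

# Hypothetical values standing in for one failed sanity check.
condition, expected_value, actual_value, status, subtaskid = 'row_count_check', 10, 7, 'Fail', 42

conn = psycopg2.connect("dbname=idw")  # assumed DSN; dbConnection() wraps something similar
cur = conn.cursor()
# psycopg2 adapts and quotes the bound values, so the SQL text never needs str.format.
cur.execute(
    """insert into birepusr.etl_data_sanity_test_results
           (condition, created_date, expected_value, actual_value, status, task_id, project_name)
       values (%s, now(), %s, %s, %s, %s, 'planview')""",
    (condition, expected_value, actual_value, status, subtaskid))
conn.commit()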
Example No. 2
def init_timesheet_insert(timesheet_entry, entry_flag):
    """
    Initiate the Planview WS API call to insert timesheet entries for the given users.

    Args:
    timesheet_entry (DataFrame): pandas dataframe with timesheet details for all users for the latest sprints
    entry_flag (String): identifier to distinguish a fresh timesheet entry from a retry of a failed one

    Returns:
    No Return
    """
    if not timesheet_entry.empty:
        df_ts = timesheet_entry.groupby(by=['timesheetId','userId'], sort=False).apply(lambda x: x.to_dict(orient='records'))
        df_ts = df_ts.drop(columns="userId")
        for row in df_ts.to_list():
            user_ts_list = {'timesheetId':row[0]['timesheetId'], 'userId': int(row[0]['userId'])}
            user = int(row[0]['userId'])
            timesheet = row[0]['timesheetId']
            for r in row: del r['userId']
            user_ts_list['entriesProject']=row
            try:
                if entry_flag == 'New':
                    planview.insertTimesheet(user_ts_list)
                else:
                    # TODO: optimize this code to perform a bulk operation at the end
                    planview.insertTimesheet(user_ts_list)
                    conn = dbConnection()
                    cur = conn.cursor()
                    cur.execute('''delete from idw.planview_failed_timesheetID where timesheetId = {0} and userId = {1}'''.format(timesheet,user))
                    conn.commit()
            except Exception as e:
                # TODO: add the failed timesheet record and user details to the email body
                send_custom_mail("Failed to populate timesheet in planview", e)
                x1 = timesheet_entry[(timesheet_entry.timesheetId == timesheet) & (timesheet_entry.userId == user)]
                insertFailedEntries(x1)
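A minimal usage sketch for the function above, assuming a pandas DataFrame whose columns match the names the function groups on and passes through to planview.insertTimesheet; the values are made up:

import pandas as pd

# Toy entries for a single user and timesheet; column names mirror those used above.
timesheet_entry = pd.DataFrame([
    {'companyId': 1, 'entryTypeId': 1, 'timesheetId': 1001, 'userId': 501,
     'level1Id': 11, 'level2Id': 22, 'level3Id': 33,
     'entryDate': '2020/06/01', 'entryId': 9001, 'entryHours': 8},
])

# 'New' inserts fresh entries; 'Existing' retries rows recorded in
# idw.planview_failed_timesheetID and deletes them after a successful insert.
init_timesheet_insert(timesheet_entry, 'New')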
Example No. 3
def createDf(df, temp_table, entity_name):
    """
    This function insert entity data in temporary table and call function 
    to process data for respective entity using newly created table.

    Args:
    dataframe : take dataframe with source data as input 
    string    : temporary table inwhile data frame will get persisted
    string    : entity name for which data is being loaded and processed

    Returns:
    No Return
    """
    conn = dbConnection()
    cur = conn.cursor()
    cur.execute(temp_table)
    output = io.StringIO()
    df = df.replace('None', '')
    df.to_csv(output, sep='|', header=True, index=False)
    output.seek(0)
    copy_query = "COPY pv_entity_temp_table FROM STDOUT csv DELIMITER '|' NULL ''  ESCAPE '\\' HEADER "
    cur.copy_expert(copy_query, output)
    proc_name = ("idw.planview_" + entity_name).lower()
    cur.callproc(proc_name)
    conn.commit()
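The load above streams the DataFrame into PostgreSQL with psycopg2's copy_expert. A self-contained sketch of that pattern against a throwaway table (the DSN is an assumption):

import io

import pandas as pd
import psycopg2

conn = psycopg2.connect("dbname=idw")  # assumed DSN
cur = conn.cursor()
cur.execute("create temp table demo_copy (id int, name text)")

df = pd.DataFrame({'id': [1, 2], 'name': ['alpha', 'beta']})
buf = io.StringIO()
df.to_csv(buf, sep='|', header=True, index=False)
buf.seek(0)

# COPY ... FROM STDIN loads the whole buffer in one round trip,
# which is far faster than row-by-row INSERTs for bulk data.
cur.copy_expert("COPY demo_copy FROM STDIN csv DELIMITER '|' NULL '' HEADER", buf)
conn.commit()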
Example No. 4
def test_max_time_spent_by_user(self):
    # check maximum hours spent by any user
    conn = dbConnection()
    cur = conn.cursor()
    cur.execute(
        """select (sum(time_spent)/actual_sprint_days)::numeric(10) as hr
    from idw.task_time_spent_fact f inner join
    idw.jira_sprint_summary ss
    on f.sprint_number=ss.sprint_number and f.team_dim_id=ss.team_dim_id
    where f.sprint_number=2448 and portfolio_dim_id is not null
    group by f.team_dim_id,f.sprint_number,ss.actual_sprint_days""")
    rows = cur.fetchall()
    summary = round(rows[0][0])
    self.assertEqual(summary, 8)
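This and the following test methods use self.assert* calls, so they presumably live inside a unittest test class; a minimal sketch of the assumed harness (the class name is hypothetical):

import unittest

class PlanviewTimesheetTests(unittest.TestCase):  # hypothetical class name

    def test_max_time_spent_by_user(self):
        ...  # body as shown above

if __name__ == '__main__':
    unittest.main()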
Example No. 5
def loadEntityData():
    subtaskid = createSubTask("Load planview data into tables",
                              getMaintaskId())
    try:
        conn = dbConnection()
        cur = conn.cursor()
        cur.execute(
            "select entity_ppm_id from idw.planview_entity_dim order by load_sequence"
        )
        rows1 = cur.fetchall()
        # rows1 = [5]
        for row in rows1:
            ppm_id = row[0]
            #ppm_id=row
            cur.execute(
                "select column_select_list from idw.planview_entity_dim where entity_ppm_id={0}"
                .format(ppm_id))
            rows = cur.fetchall()
            json_dict = rows[0][0].values()
            get_list = ",".join([str(e) for e in json_dict])
            mList = [int(e) if e.isdigit() else e for e in get_list.split(",")]
            json_keys = rows[0][0].keys()
            get_Cols = ",".join([str(e) for e in json_keys])
            column_list = [
                int(e) if e.isdigit() else e for e in get_Cols.split(",")
            ]
            data = getData(ppm_id, mList)
            df = data['methodValues']
            print(df)
            test_df(df, column_list)
            ##df.apply(lambda row: getPlanviewData(row, column_list, ppm_id))
            createDf(ppm_id, dfObj, temp_table)
            print(dfObj)
            dfObj.drop(columns=column_list, inplace=True)
            df.drop(df.index, inplace=True)
        updateSubTask(subtaskid, "SUCCESS")
    except (Exception) as error:
        insertErrorLog(subtaskid, error)

Example No. 6
def test_fetch_timesheet_entries(self):
    planview_entities = executeQueryAndReturnDF(
        "select string_agg(res.resource_ppm_id::varchar(15), ', ') AS user_list,'2020/01/01' as min from (select r.resource_ppm_id,r.resource_dim_id from idw.user_team_map ut inner join idw.team_dim t on ut.team_dim_id=t.team_dim_id inner join idw.planview_resource_dim r ON ut.user_dim_id = r.resource_dim_id where is_reportable=true) res"
    )
    conn = dbConnection()
    cur = conn.cursor()
    for index, row in planview_entities.iterrows():
        users = row['user_list']
        if users is not None:
            # int() tolerates the leading space left by the ', ' separator
            mList = [int(e) for e in users.split(',')]
        else:
            raise Exception("User list is empty")
        startDate = row['min']
        sheet = planview.getTimesheet_users(mList, startDate)
        self.assertTrue(len(sheet.index))
        break
Example No. 7
def test_time_spent_by_team(self):
    # check number of developers
    conn = dbConnection()
    cur = conn.cursor()
    cur.execute("""select sum(per_day_time_spent)::numeric(10) as hr
    from idw.planview_get_timesheet_entries('2448')
    group by team_name, sprint_name;""")
    rows = cur.fetchall()
    summary = round(rows[0][0])
    cur.execute(
        """select (sum(time_spent)*number_of_devs)::numeric(10) as hr
    from idw.task_time_spent_fact f inner join
    idw.jira_sprint_summary ss
    on f.sprint_number=ss.sprint_number and f.team_dim_id=ss.team_dim_id
    where f.sprint_number=2448 and portfolio_dim_id is not null
    group by f.team_dim_id,f.sprint_number,ss.number_of_devs;""")
    row1 = cur.fetchall()
    issues = round(row1[0][0])
    self.assertEqual(summary, issues)
Example No. 8
def insertFailedEntries(entry):
    """
    This function insert failed timesheets in idw.planview_failed_timesheetID table 
    so that we can retry them at later point

    Args:
    Dataframe: Dataframe with timesheet object which failed while populating data in planview

    Returns:
    No Return
    """
    conn = dbConnection()
    cur = conn.cursor()
    cur.execute('''create temp table timesheet_entry (companyId  int,entryTypeId int,timesheetId bigint,userId bigint,level1Id bigint,level2Id bigint,entryDate date,entryId bigint,level3Id bigint,entryHours numeric) ''')
    output = io.StringIO()
    entry.to_csv(output, sep='|', header=True, index=False)
    output.seek(0)
    copy_query = "COPY timesheet_entry FROM STDOUT csv DELIMITER '|' NULL ''  ESCAPE '\\' HEADER "
    cur.copy_expert(copy_query, output)
    cur.execute('''insert into idw.planview_failed_timesheetID(companyId,entryTypeId,timesheetId,userId,level1Id,level2Id,entryDate,entryId,level3Id,entryHours,created_date) select companyId,entryTypeId,timesheetId,userId,level1Id,level2Id,entryDate,entryId,level3Id,COALESCE(entryHours, 0) entryHours,now() as created_date from timesheet_entry where concat(timesheetid,userid) not in (select concat(timesheetid,userid) from idw.planview_failed_timesheetID)''')
    conn.commit()
Example No. 9
def createDf(ppm_id, df, temp_table):
    subtaskid = createSubTask(
        "Create temp table to load the planview data into db tables",
        getMaintaskId())
    try:
        conn = dbConnection()
        cur = conn.cursor()
        cur.execute(temp_table)
        output = io.StringIO()
        df.to_csv(output, sep='|', header=True, index=False)
        output.seek(0)
        copy_query = "COPY temp_table FROM STDOUT csv DELIMITER '|' NULL ''  ESCAPE '\\' HEADER "
        cur.copy_expert(copy_query, output)
        cur.execute(
            '''select entity_name from idw.planview_entity_dim where entity_ppm_id = {0}'''
            .format(ppm_id))
        ppm = cur.fetchall()
        proc_name = ("idw.proc_" + ppm[0][0]).lower()
        cur.callproc(proc_name)
        conn.commit()
        updateSubTask(subtaskid, "SUCCESS")
    except (Exception) as error:
        insertErrorLog(subtaskid, error)
Example No. 10
def timesheet_entry_user():
    """
    This function process generates data using timesheet and sprint data, 
    to insert timesheet entry into planview. 

    Args:
    No Arguments

    Returns:
    No Return
    """
    subtaskid = al.createSubTask(
        "Populate timesheet for latest sprints in planview",
        al.getMaintaskId())
    try:
        #Fetch user ids for processing
        planview_entities = executeQueryAndReturnDF(dq.get_userlist_to_populate_timesheet)
        users = planview_entities['user_list']
        ts_columns = {'companyid':'companyId','entrytypeid':'entryTypeId','timesheetid':'timesheetId','userid':'userId', 'level1id':'level1Id','level2id':'level2Id','entrydate':'entryDate','entryid':'entryId','level3id':'level3Id','entryhours':'entryHours'}

        if users[0] is not None:
            mList = [int(e) for e in users[0].split(',')]
        
            startDate = planview_entities['min']
            #Fetch timesheet ids for above user list
            sheet = planview.getTimesheet_users(mList,startDate[0])

            column_names = ['timesheetId','userId','startDate','endDate','entryId','entryDate','entryHours','project_ppm_id','task_ppm_id']

            user_timesheet_data = pd.DataFrame(columns=column_names)

            for tsrow in sheet.itertuples(index=True, name='Pandas'):
                if not tsrow.entriesProject:
                    user_timesheet_data = user_timesheet_data.append({'timesheetId': tsrow.timesheetId,'userId':tsrow.userId, 'startDate':tsrow.startDate, 'endDate':tsrow.endDate,'entryId': None,'entryDate': None,'entryHours': None,'project_ppm_id': None,'task_ppm_id': None}, ignore_index=True)
                else:
                    for entry in tsrow.entriesProject:
                        user_timesheet_data = user_timesheet_data.append({'timesheetId': tsrow.timesheetId, 'userId':tsrow.userId, 'startDate':tsrow.startDate, 'endDate':tsrow.endDate,'entryId': entry['entryId'],'entryDate': entry['entryDate'],'entryHours': entry['entryHours'],'project_ppm_id': entry['level1Id'],'task_ppm_id': entry['level2Id']}, ignore_index=True)
            
            sprint_number = executeQueryAndReturnDF(dq.get_sprints_for_timesheet)
            sprint_list = sprint_number['sprint_number'][0]
            conn = dbConnection()
            cur = conn.cursor()
            #export timesheet details into temporary table
            cur.execute('''create temp table timesheet_data (timesheetId bigint, 
            userId bigint, startDate date, endDate date, entryId bigint, 
            entryDate date,entryHours numeric, project_ppm_id bigint, task_id bigint) ''')
            output = io.StringIO()
            user_timesheet_data.to_csv(output, sep='|', header=True, index=False)
            output.seek(0)
            copy_query = "COPY timesheet_data FROM STDOUT csv DELIMITER '|' NULL ''  ESCAPE '\\' HEADER "
            cur.copy_expert(copy_query, output)
            
            #Joining timesheet data and sprint data
            query = dq.timesheet_population_query.format(sprint_list)

            timesheet_entry =  pd.read_sql_query(query, coerce_float=False, con=conn)

            conn.commit()
            timesheet_entry.rename(columns=ts_columns,inplace=True)
            init_timesheet_insert(timesheet_entry,'New')
            cur.execute('''update idw.jira_sprint_summary set is_timesheet_populated = true where sprint_number in ({0})'''.format(sprint_list))
            conn.commit()
        else:
            print("User list is Empty")

        #Processing failed timesheet entries
        failed_entries_df = executeQueryAndReturnDF("select companyId,entryTypeId,timesheetId,userId,level1Id,level2Id,to_char(entryDate,'YYYY/MM/DD') entryDate,entryId,level3Id,entryHours from idw.planview_failed_timesheetID")
        failed_entries_df.rename(columns=ts_columns,inplace=True)
        init_timesheet_insert(failed_entries_df,'Existing')
        al.updateSubTask(subtaskid, "SUCCESS")
    except Exception as error:
        al.insertErrorLog(subtaskid, error)
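The row-by-row DataFrame.append used above is deprecated and was removed in pandas 2.0; a sketch of the same accumulation that collects plain dicts and builds the frame once (sheet and column_names are the variables from the function above):

import pandas as pd

rows = []
for tsrow in sheet.itertuples(index=True, name='Pandas'):
    if not tsrow.entriesProject:
        rows.append({'timesheetId': tsrow.timesheetId, 'userId': tsrow.userId,
                     'startDate': tsrow.startDate, 'endDate': tsrow.endDate,
                     'entryId': None, 'entryDate': None, 'entryHours': None,
                     'project_ppm_id': None, 'task_ppm_id': None})
    else:
        for entry in tsrow.entriesProject:
            rows.append({'timesheetId': tsrow.timesheetId, 'userId': tsrow.userId,
                         'startDate': tsrow.startDate, 'endDate': tsrow.endDate,
                         'entryId': entry['entryId'], 'entryDate': entry['entryDate'],
                         'entryHours': entry['entryHours'],
                         'project_ppm_id': entry['level1Id'], 'task_ppm_id': entry['level2Id']})

# One DataFrame construction instead of repeated append calls, each of which copies the frame.
user_timesheet_data = pd.DataFrame(rows, columns=column_names)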
Example No. 11
def delete_entries():
    subtaskid = al.createSubTask("Delete timesheet entries for a team",
                                 al.getMaintaskId())
    try:
        timesheet_team = executeQueryAndReturnDF(dq.delete_timesheet_entries)

        for team in timesheet_team.itertuples(index=True, name='Pandas'):
            print(team.resource_list)
            users = team.resource_list
            if users is None:
                continue
            mList = [int(e) for e in users.split(',')]

            team_id = team.team_dim_id
            startDate = team.from_date
            endDate = team.to_date
            #Fetch timesheet ids for above user list
            sheet = planview.getTimesheet_users(mList, startDate)

            column_names = [
                'timesheetId', 'userId', 'startDate', 'endDate', 'entryId',
                'entryDate', 'entryHours', 'project_ppm_id', 'task_ppm_id'
            ]

            user_timesheet_data = pd.DataFrame(columns=column_names)

            for tsrow in sheet.itertuples(index=True, name='Pandas'):
                if not tsrow.entriesProject:
                    user_timesheet_data = user_timesheet_data.append(
                        {
                            'timesheetId': tsrow.timesheetId,
                            'userId': tsrow.userId,
                            'startDate': tsrow.startDate,
                            'endDate': tsrow.endDate,
                            'entryId': None,
                            'entryDate': None,
                            'entryHours': None,
                            'project_ppm_id': None,
                            'task_ppm_id': None
                        },
                        ignore_index=True)
                else:
                    for entry in tsrow.entriesProject:
                        user_timesheet_data = user_timesheet_data.append(
                            {
                                'timesheetId': tsrow.timesheetId,
                                'userId': tsrow.userId,
                                'startDate': tsrow.startDate,
                                'endDate': tsrow.endDate,
                                'entryId': entry['entryId'],
                                'entryDate': entry['entryDate'],
                                'entryHours': entry['entryHours'],
                                'project_ppm_id': entry['level1Id'],
                                'task_ppm_id': entry['level2Id']
                            },
                            ignore_index=True)

            print(user_timesheet_data.to_string())
            #export timesheet details into temporary table
            conn = dbConnection()
            cur = conn.cursor()
            cur.execute(
                '''create temp table timesheet_data_delete (timesheetId bigint, 
            userId bigint, startDate date, endDate date, entryId bigint, 
            entryDate date,entryHours numeric, project_ppm_id bigint, task_id bigint) '''
            )
            output = io.StringIO()
            user_timesheet_data.to_csv(output,
                                       sep='|',
                                       header=True,
                                       index=False)
            output.seek(0)
            copy_query = "COPY timesheet_data_delete FROM STDOUT csv DELIMITER '|' NULL ''  ESCAPE '\\' HEADER "
            cur.copy_expert(copy_query, output)
            query = '''select timesheetid,userid,string_agg(entryid::varchar(15), ', ') AS entryid from timesheet_data_delete where entrydate between '{0}' and '{1}' group by timesheetid,userid'''.format(
                startDate, endDate)
            entries = pd.read_sql_query(query, con=conn)
            print(entries)
            conn.commit()

            for i in entries.itertuples(index=True, name='Pandas'):
                print(i.timesheetid)
                print(i.entryid)
                entries = i.entryid
                if entries is None:
                    continue
                entry_List = [int(e) for e in entries.split(',')]

                print(entry_List)

                timesheet_id = i.timesheetid
                #delete timesheet entries for timesheetid
                planview.deleteTimesheet_entries(timesheet_id, entry_List)
            cur.execute(
                '''update idw.timesheet_refresh_log set is_timesheet_refreshed = true where team_dim_id = {0}'''
                .format(team_id))
            conn.commit()

    except (Exception) as error:
        al.insertErrorLog(subtaskid, error)
Example No. 12
def timesheet_entry_all_user():
    """
    This function pull timesheet entries from planview for users and insert into planview_timesheet_entries table 

    Args:
    No Arguments

    Returns:
    No Return
    """
    subtaskid = al.createSubTask(
        "Pull timesheet entries from planview for users and insert into planview_timesheet_entries table",
        al.getMaintaskId())
    try:
        #Fetch user ids for processing
        planview_entities = executeQueryAndReturnDF(dq.get_user_list_to_pull_timesheet_entries)
        conn = dbConnection()
        cur = conn.cursor()
        for index, row in planview_entities.iterrows():
            users = row['user_list']
            if users is not None:
                mList = [int(e) for e in users.split(',')]
            else:
                raise Exception("User list is Empty")
            startDate = row['min']

            #Fetch timesheet entries for user list
            sheet = planview.getTimesheet_users(mList,startDate)

            column_names = ['timesheetId', 'companyId', 'entryDate', 'entryHours', 'entryId', 'entryTypeId', 'externalId', 'internalRate', 'billableRate', 'isBillable', 'isProductive', 'level1Id', 'level2Id', 'level3Id', 'locationId', 'notes', 'state', 'userId']
            user_timesheet_data = pd.DataFrame(columns=column_names)

            for tsrow in sheet.itertuples(index=True, name='Pandas'):
            
                for entry in tsrow.entriesProject:
                    user_timesheet_data = user_timesheet_data.append({'timesheetId': tsrow.timesheetId,'companyId': entry['companyId'], 'entryDate': entry['entryDate'],'entryHours': entry['entryHours'],'entryId': entry['entryId'],'entryTypeId': entry['entryTypeId'] ,'externalId': entry['externalId'] ,'internalRate': entry['internalRate'] ,'billableRate': entry['billableRate'] ,'isBillable': entry['isBillable'] ,'isProductive': entry['isProductive'] ,'level1Id': entry['level1Id'],'level2Id': entry['level2Id'],'level3Id': entry['level3Id'],'locationId': entry['locationId'],'notes': entry['notes'],'state': entry['state'],'userId':tsrow.userId}, ignore_index=True)
                for entry1 in tsrow.entriesOther:
                    user_timesheet_data = user_timesheet_data.append({'timesheetId': tsrow.timesheetId,'companyId': entry1['companyId'], 'entryDate': entry1['entryDate'],'entryHours': entry1['entryHours'],'entryId': entry1['entryId'],'entryTypeId': entry1['entryTypeId'] ,'externalId': entry1['externalId'] ,'internalRate': entry1['internalRate'] ,'billableRate': entry1['billableRate'] ,'isBillable': entry1['isBillable'] ,'isProductive': entry1['isProductive'] ,'level1Id': entry1['level1Id'],'level2Id': entry1['level2Id'],'level3Id': entry1['level3Id'],'locationId': entry1['locationId'],'notes': entry1['notes'],'state': entry1['state'],'userId':tsrow.userId}, ignore_index=True)
                for entry2 in tsrow.entriesAccount:
                    user_timesheet_data = user_timesheet_data.append({'timesheetId': tsrow.timesheetId,'companyId': entry2['companyId'], 'entryDate': entry2['entryDate'],'entryHours': entry2['entryHours'],'entryId': entry2['entryId'],'entryTypeId': entry2['entryTypeId'] ,'externalId': entry2['externalId'] ,'internalRate': entry2['internalRate'] ,'billableRate': entry2['billableRate'] ,'isBillable': entry2['isBillable'] ,'isProductive': entry2['isProductive'] ,'level1Id': entry2['level1Id'],'level2Id': entry2['level2Id'],'level3Id': entry2['level3Id'],'locationId': entry2['locationId'],'notes': entry2['notes'],'state': entry2['state'],'userId':tsrow.userId}, ignore_index=True)
                for entry3 in tsrow.entriesPortfolio:
                    user_timesheet_data = user_timesheet_data.append({'timesheetId': tsrow.timesheetId,'companyId': entry3['companyId'], 'entryDate': entry3['entryDate'],'entryHours': entry3['entryHours'],'entryId': entry3['entryId'],'entryTypeId': entry3['entryTypeId'] ,'externalId': entry3['externalId'] ,'internalRate': entry3['internalRate'] ,'billableRate': entry3['billableRate'] ,'isBillable': entry3['isBillable'] ,'isProductive': entry3['isProductive'] ,'level1Id': entry3['level1Id'],'level2Id': entry3['level2Id'],'level3Id': entry3['level3Id'],'locationId': entry3['locationId'],'notes': entry3['notes'],'state': entry3['state'],'userId':tsrow.userId}, ignore_index=True)
            
                user_timesheet_data['isBillable'] = user_timesheet_data['isBillable'].astype('bool')
                user_timesheet_data['isProductive'] = user_timesheet_data['isProductive'].astype('bool')

            #export timesheet details into temporary table
            cur.execute('''drop table if exists timesheet_data;''')
            cur.execute('''create temp table timesheet_data (timesheetid numeric, companyid numeric, entrydate date, entryhours numeric, entryid numeric, entrytypeid numeric, externalid varchar(100), internalrate numeric, billablerate numeric, isbillable boolean, isproductive boolean, level1id numeric, level2id numeric, level3id numeric, locationid numeric, notes text, state text, userid numeric);''')
            output = io.StringIO()
            user_timesheet_data.to_csv(output, sep='~', header=True, index=False)
            output.seek(0)
            copy_query = "COPY timesheet_data FROM STDOUT csv DELIMITER '~' NULL ''  ESCAPE '\\' HEADER "
            cur.copy_expert(copy_query, output)
            conn.commit()
            cur.execute(dq.update_is_active_flag_for_ts_entries)
            cur.execute(dq.insert_timesheet_entries)
            cur.execute(dq.update_timesheet_dim_ids)
            conn.commit()
        al.updateSubTask(subtaskid, "SUCCESS")
    except Exception as error:
        print(error)
        send_custom_mail("Failed to fetch timesheet entries from planview", error)
        al.insertErrorLog(subtaskid, error)
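The four near-identical loops over entriesProject, entriesOther, entriesAccount and entriesPortfolio above could share one helper; a sketch, assuming every entry dict carries the same keys used in the appends (sheet and column_names are the variables from the function above):

import pandas as pd

ENTRY_LISTS = ('entriesProject', 'entriesOther', 'entriesAccount', 'entriesPortfolio')

def rows_from_timesheet(tsrow):
    """Flatten every entry list on one timesheet row into per-entry dicts."""
    for list_name in ENTRY_LISTS:
        for entry in getattr(tsrow, list_name):
            yield {**entry, 'timesheetId': tsrow.timesheetId, 'userId': tsrow.userId}

rows = [r for tsrow in sheet.itertuples(index=True, name='Pandas')
        for r in rows_from_timesheet(tsrow)]
user_timesheet_data = pd.DataFrame(rows, columns=column_names)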
Example No. 13
def timesheet_entry_user():

    #Fetch user ids for processing
    planview_entities = executeQueryAndReturnDF(
        "select string_agg(a1.resource_ppm_id::varchar(15), ', ') AS user_list, to_char(min(a1.sprint_start_date), 'YYYY/MM/DD') as min from ( SELECT d.resource_ppm_id, min(a.sprint_start_date) as sprint_start_date FROM idw.jira_sprint_summary a inner join idw.user_team_map c ON a.team_dim_id = c.team_dim_id inner join idw.planview_resource_dim d ON c.user_dim_id = d.resource_dim_id AND c.team_entry_date < a.sprint_start_date AND COALESCE(c.team_exit_date::timestamp with time zone, now()) > a.sprint_end_date where coalesce(a.is_timehseet_populated,false)=false and a.sprint_end_date > '2020-05-14' and a.sprint_end_date <= current_date group by d.resource_ppm_id ) a1"
    )
    users = planview_entities['user_list']
    mList = [int(e) for e in users[0].split(',')]
    startDate = planview_entities['min']
    #Fetch timesheet ids for above user list
    sheet = getTimesheet_users(mList, startDate[0])
    #print(row['entryHours'],row['entryDate'],row['timesheetId'])

    timesheet_data = sheet[['timesheetId', 'userId', 'startDate', 'endDate']]
    #print(timesheet_data)
    conn = dbConnection()
    cur = conn.cursor()
    cur.execute(
        '''create temp table timesheet_entry (entryId bigint, timesheetId bigint, entryDate date, entryHours numeric) '''
    )
    l = sheet['entriesProject']
    for row in l:
        for x in row:
            cur.execute(
                '''insert into timesheet_entry(entryId,timesheetId,entryDate,entryHours) values ({0},{1},'{2}','{3}')'''
                .format(x['entryId'], x['timesheetId'], x['entryDate'],
                        x['entryHours']))
            conn.commit()

    #Creating temporary table to store timesheet data
    cur.execute(
        '''create temp table timesheet_data (timesheetId bigint, userId bigint, startDate date, endDate date) '''
    )
    output = io.StringIO()
    timesheet_data.to_csv(output, sep='|', header=True, index=False)
    output.seek(0)
    copy_query = "COPY timesheet_data FROM STDOUT csv DELIMITER '|' NULL ''  ESCAPE '\\' HEADER "
    cur.copy_expert(copy_query, output)
    conn.commit()
    #Fetch list of sprint numbers
    sprint_number = executeQueryAndReturnDF(
        "SELECT string_agg(sprint_number::varchar(15), ', ') AS sprint_number from (select distinct sprint_number FROM idw.jira_sprint_summary a where coalesce(a.is_timehseet_populated,false)=false and a.sprint_end_date > '2020-05-14' and a.sprint_end_date <= current_date)x1"
    )
    sprint_list = sprint_number['sprint_number'][0]
    print(sprint_list)

    #Joining timesheet data and sprint data
    #query = '''select pd.*, td.timesheetId::bigint timesheetId, td.entryid, td.entrydate from (select  resource_ppm_id::bigint,to_char(date_of_sprint, 'YYYY/MM/DD') date_of_sprint,per_day_time_spent,project_ppm_id::bigint,task_ppm_id::bigint,resource_role_id::bigint from idw.planview_get_timesheet_entries('2448, 2506, 2519, 2552')) pd  inner join (select x1.timesheetid,x2.entryid, x2.entrydate, x1.userId, x1.startDate, x1.endDate 			from timesheet_data x1 			left outer join timesheet_entry x2 			on x1.timesheetid = x2.timesheetid) td  on pd.resource_ppm_id = td.userId  and case when entrydate is null then (date_of_sprint::date >= startDate::date and date_of_sprint::date <= endDate::date) else (date_of_sprint::date = entrydate::date) end'''.format(sprint_list)
    #timesheet_entry =  pd.read_sql_query(query, con=conn)

    entryTypeId = 1
    companyId = 1
    query1 = '''select timesheetid,userid from timesheet_data'''
    timesheet_users = pd.read_sql_query(query1, con=conn)
    print(timesheet_users)
    for index, row1 in timesheet_users.iterrows():
        print(row1['timesheetid'], row1['userid'])
        #query = '''select pd.*, td.timesheetId::bigint timesheetId, td.entryid, td.entrydate from (select  resource_ppm_id::bigint,to_char(date_of_sprint, 'YYYY/MM/DD') date_of_sprint,per_day_time_spent,project_ppm_id::bigint,task_ppm_id::bigint,resource_role_id::bigint from idw.planview_get_timesheet_entries('2448, 2506, 2519, 2552')) pd  inner join (select x1.timesheetid,x2.entryid, x2.entrydate, x1.userId, x1.startDate, x1.endDate 			from timesheet_data x1 			left outer join timesheet_entry x2 			on x1.timesheetid = x2.timesheetid) td  on pd.resource_ppm_id = td.userId  and case when entrydate is null then (date_of_sprint::date >= startDate::date and date_of_sprint::date <= endDate::date) else (date_of_sprint::date = entrydate::date) end  where pd.resource_ppm_id = '{1}' and td.timesheetid = {2} '''.format(sprint_list, row['timesheetid'], row['userid'])
        query = '''select pd.*, td.timesheetId::bigint timesheetId, td.entryid, to_char(td.entrydate::date, 'YYYY/MM/DD') entrydate from (select  resource_ppm_id::bigint,to_char(date_of_sprint, 'YYYY/MM/DD') date_of_sprint,per_day_time_spent,project_ppm_id::bigint,task_ppm_id::bigint,resource_role_id::bigint from idw.planview_get_timesheet_entries('2448, 2506, 2519, 2552')) pd  inner join (select x1.timesheetid,x2.entryid, x2.entrydate, x1.userId, x1.startDate, x1.endDate 			from timesheet_data x1 			left outer join timesheet_entry x2 			on x1.timesheetid = x2.timesheetid) td  on pd.resource_ppm_id = td.userId  and case when entrydate is null then (date_of_sprint::date >= startDate::date and date_of_sprint::date <= endDate::date) else (date_of_sprint::date = entrydate::date) end  where pd.resource_ppm_id = {1} and td.timesheetid = {2} '''.format(
            sprint_list, row1['userid'], row1['timesheetid'])
        timesheet_entry = pd.read_sql_query(query, con=conn)
        print(timesheet_entry)
        strmy = []
        str1 = '['
        for index, row in timesheet_entry.iterrows():
            print(row)
            l1 = row['entryid']
            print(l1)
            #x = "{" + '''"companyId": 1, "entryId" : {0}, "entryDate" :  '{1}', "entryHours": {2},"entryTypeId": 1, "level1Id": {3}, "level2Id": {4}, "level3Id": {5}, "timesheetId": {6}'''.format(row['entryid'],row['entrydate'],row['per_day_time_spent'],row['project_ppm_id'],row['task_ppm_id'],row['resource_role_id'],row['timesheetid']) + "}"
            x = '{' + """'companyId': 1, 'entryId' : {0}, 'entryDate' :  '{1}', 'entryHours': {2},'entryTypeId': 1, 'level1Id': {3}, 'level2Id': {4}, 'level3Id': {5}, 'timesheetId': {6}""".format(
                row['entryid'], row['entrydate'], row['per_day_time_spent'],
                row['project_ppm_id'], row['task_ppm_id'],
                row['resource_role_id'], row['timesheetid']) + '}'
            #strmy.append(x)
            str1 = str1 + x + ","
        x1 = str1.rstrip(',') + "]"
        print(x1)
        if x1 != '[]':
            insertTimesheet1(row1['timesheetid'], row1['userid'], x1)
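The payload above is assembled by concatenating single-quoted fragments, which is fragile; a sketch of building the same list with json.dumps (the variables are the ones from the loop above, and insertTimesheet1 is assumed to accept a JSON string):

import json

entries = []
for index, row in timesheet_entry.iterrows():
    entries.append({
        'companyId': 1,
        'entryId': int(row['entryid']),
        'entryDate': row['entrydate'],
        'entryHours': float(row['per_day_time_spent']),
        'entryTypeId': 1,
        'level1Id': int(row['project_ppm_id']),
        'level2Id': int(row['task_ppm_id']),
        'level3Id': int(row['resource_role_id']),
        'timesheetId': int(row['timesheetid']),
    })

if entries:
    # json.dumps handles quoting and escaping; no manual bracket or comma bookkeeping.
    insertTimesheet1(row1['timesheetid'], row1['userid'], json.dumps(entries))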