Example 1
import calendar
import datetime
import json
import re
from os.path import join

import jalali


def get_holiday_count(start_date, end_date):
    """
    Returns the number of non-working days (official holidays plus Fridays)
    in the given period
    :param start_date: start of the period as a Jalali date string, e.g. "1396/11/21"
    :param end_date: end of the period as a Jalali date string
    :return: number of non-working days
    """
    g_start = jalali.Persian(start_date).gregorian_datetime()
    g_end = jalali.Persian(end_date).gregorian_datetime()

    # print("Start:" + str(g_start))
    # print("End:" + str(g_end))

    data_path = '../data'
    events_path = join(data_path, 'events.json')
    holiday_count = 0
    week = {}
    with open(events_path, 'rt', encoding='utf-8') as events_file:
        holidays = json.load(events_file)
        # Count official holidays that fall inside the requested period.
        for record in holidays["events"]:
            record_year = record['year']
            record_month = record['month']
            record_day = record['day']
            # A year of -1 marks an event that recurs every year; take the
            # year from start_date instead.
            if record['year'] == -1:
                m = re.match(r'^(\d{4})\D(\d{1,2})\D(\d{1,2})$', start_date)
                if m:
                    record_year = int(m.group(1))
            record_date = jalali.Persian(record_year, record_month, record_day)
            if record_date and g_start <= record_date.gregorian_datetime() <= g_end:
                # print(record_date.gregorian_datetime())
                if record['holiday']:
                    holiday_count += 1

    # Tally weekday names across the period to count Fridays (the weekly day off).
    for i in range((g_end - g_start).days):
        day = calendar.day_name[(g_start +
                                 datetime.timedelta(days=i + 1)).weekday()]
        week[day] = week[day] + 1 if day in week else 1
    # print("Holiday:" + str(holiday_count))
    fridays = 0
    if 'Friday' in week:
        fridays = week['Friday']
        # print("Friday:" + str(week['Friday']))
    return holiday_count + fridays
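
A minimal usage sketch for the function above. The events.json schema is an assumption inferred from the keys the loop reads (year, month, day, holiday), with year -1 taken to mean an event that recurs every year; the file contents and sample dates below are placeholders.

import json

# Hypothetical events.json contents, inferred from the lookups in get_holiday_count.
sample_events = {
    "events": [
        {"year": -1, "month": 11, "day": 22, "holiday": True},    # assumed recurring holiday
        {"year": 1396, "month": 11, "day": 25, "holiday": False}  # non-holiday event
    ]
}

with open('../data/events.json', 'wt', encoding='utf-8') as events_file:
    json.dump(sample_events, events_file, ensure_ascii=False)

# Counts the 1396/11/22 holiday plus any Fridays falling in the range.
print(get_holiday_count("1396/11/21", "1396/11/25"))
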
Example 2
import datetime
import socket
import time

import pandas as pd
from dateutil.relativedelta import relativedelta

# ga_engine, active_users, es_engine, logger and jalali are project modules;
# VIEW_ID, DB_NAME, TABLE_NAME, INDX, cursor, es and validation are defined
# elsewhere in the surrounding module (a placeholder setup sketch follows
# this example).


def main():
    analytics = ga_engine.initialize_analyticsreporting('web')
    limit_date = datetime.datetime.now().date()
    ref_date = validation(analytics)

    for i in range((limit_date - ref_date).days - 1):
        step_time = ref_date + relativedelta(days=+i)
        year, month = jalali.Gregorian(step_time).persian_tuple()[0:2]
        custom_start = jalali.Persian(year, month, 1).gregorian_datetime()
        df_part1 = active_users.fetch_data_daily(
            VIEW_ID, analytics, step_time.strftime('%Y-%m-%d'), 'web')
        df_part1.columns = ['date', 'category', 'sessions', 'dailyUsers']
        df_part2 = active_users.fetch_data_monthly(
            VIEW_ID, analytics,
            step_time.replace(day=1).strftime('%Y-%m-%d'),
            step_time.strftime('%Y-%m-%d'), 'web')
        df_part2.columns = ['category', 'month', 'monthlyUsers']
        df_part3 = active_users.fetch_data_custom_wrapper(
            VIEW_ID, analytics, custom_start, step_time, 'monthlyUsersJalali',
            'web')
        df_part4 = active_users.fetch_data_custom_wrapper(
            VIEW_ID, analytics, step_time + relativedelta(days=-29), step_time,
            '30DaysWindow', 'web')

        df_part1['date'] = pd.to_datetime(df_part1['date'])
        total_df = df_part1.join(df_part2.set_index('category'), on='category')
        total_df = total_df.join(df_part3.set_index('category'), on='category')
        total_df = total_df.join(df_part4.set_index('category'), on='category')
        total_df.drop(['month'], axis=1, inplace=True)

        print(total_df)

        try:
            cursor.fast_executemany = True
            sql_comm = '''INSERT INTO [{}].[dbo].[{}]
            ([date],[category],[sessions],[dailyUsers],[monthlyUsers],[monthlyUsersJalali],[30DaysWindow])
             VALUES (?,?,?,?,?,?,?)'''.format(DB_NAME, TABLE_NAME)
            cursor.executemany(sql_comm, total_df.values.tolist())
            cursor.commit()
            doc = logger.create_log('Insert',
                                    'Ack',
                                    step_time,
                                    socket.gethostname(),
                                    'Successful Insert',
                                    server_len=len(total_df.index),
                                    database_len=len(total_df.index))
            es_engine.log_into_es(es, 'textlogs-{}'.format(INDX), doc)
        except Exception as e:
            doc = logger.create_log('Insert', 'Nack', step_time,
                                    socket.gethostname(), str(e))
            es_engine.log_into_es(es, 'textlogs-{}'.format(INDX), doc)

        time.sleep(2)
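
The loop above relies on handles and constants defined elsewhere in the project. A minimal setup sketch, assuming pyodbc for the SQL Server connection (cursor.fast_executemany and cursor.commit() are pyodbc cursor features) and the official elasticsearch client for es; every constant and connection detail below is a placeholder.

import pyodbc
from elasticsearch import Elasticsearch

# Placeholder constants; real values come from the project's configuration.
VIEW_ID = '123456789'            # hypothetical Google Analytics view id
DB_NAME = 'analytics'            # hypothetical database name
TABLE_NAME = 'active_users_web'  # hypothetical table name
INDX = 'web'                     # hypothetical Elasticsearch index suffix

# Assumed SQL Server connection through pyodbc.
conn = pyodbc.connect(
    'DRIVER={ODBC Driver 17 for SQL Server};'
    'SERVER=localhost;DATABASE=' + DB_NAME + ';UID=user;PWD=password')
cursor = conn.cursor()

# Assumed Elasticsearch client passed to es_engine.log_into_es.
es = Elasticsearch(['http://localhost:9200'])
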
Example 3
            record_date = jalali.Persian(record_year, record_month, record_day)
            if record_date and g_start <= record_date.gregorian_datetime() <= g_end:
                # print(record_date.gregorian_datetime())
                if record['holiday']:
                    holiday_count += 1

    for i in range((g_end - g_start).days):
        day = calendar.day_name[(g_start +
                                 datetime.timedelta(days=i + 1)).weekday()]
        week[day] = week[day] + 1 if day in week else 1
    # print("Holiday:" + str(holiday_count))
    fridays = 0
    if 'Friday' in week:
        fridays = week['Friday']
        # print("Friday:" + str(week['Friday']))
    return holiday_count + fridays


if __name__ == '__main__':
    logging.basicConfig(
        filename='../logs/utils_time.log',
        filemode='a',
        format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
        level=logging.DEBUG)
    print(get_holiday_count("1396/11/21", "1396/11/22"))

    g_start = jalali.Persian("1396/11/21").gregorian_datetime()
    g_end = jalali.Persian("1396/11/22").gregorian_datetime()
    print(get_holiday_count_datetime(g_start, g_end))
    dates_string = []
    dates_day = []
    dates_path = join(data_path, 'pickup_dates.pkl')
    if isfile(dates_path):
        logger.info('Loading from file...')
        with open(dates_path, 'rb') as dates_file:
            dates_string, dates_day = pickle.load(dates_file)
    else:
        logger.info('Computing data')
        with open(delivery_path) as csv_file:
            csv_reader = csv.reader(csv_file)
            index = 0
            for row in csv_reader:
                dates_string.append(row[5])
                dates_day.append((jalali.Persian(row[5]).gregorian_datetime() -
                                  datetime.datetime(1970, 1, 1).date()).days)
                index += 1
            for i in range(len(dates_day)):
                days = dates_day[i]
                date1 = jalali.Gregorian(
                    (datetime.datetime(1970, 1, 1) +
                     datetime.timedelta(days=days)).strftime(
                         '%Y-%m-%d')).persian_string('{:04d}/{:02d}/{:02d}')
                date2 = dates_string[i]
                assert date1 == date2
            with open(dates_path, 'wb') as dates_file:
                pickle.dump((dates_string, dates_day), dates_file)
            logger.info('Saving to file...')
    logger.info('Data is ready.')
    dates_string = []
    dates_day = []
    dates_path = join(data_path, 'dates.pkl')
    if isfile(dates_path):
        logger.info('Loading from file...')
        with open(dates_path, 'rb') as dates_file:
            dates_string, dates_day = pickle.load(dates_file)
    else:
        logger.info('Computing data')
        with open(delivery_path) as csv_file:
            csv_reader = csv.reader(csv_file)
            index = 0
            for row in csv_reader:
                if index % 50 == 0:
                    dates_string.append(row[7])
                    dates_day.append(
                        (jalali.Persian(row[7]).gregorian_datetime() -
                         datetime.datetime(1970, 1, 1).date()).days)
                index += 1
            for i in range(len(dates_day)):
                days = dates_day[i]
                date1 = jalali.Gregorian(
                    (datetime.datetime(1970, 1, 1) +
                     datetime.timedelta(days=days)).strftime(
                         '%Y-%m-%d')).persian_string('{:04d}/{:02d}/{:02d}')
                date2 = dates_string[i]
                assert date1 == date2
            with open(dates_path, 'wb') as dates_file:
                pickle.dump((dates_string, dates_day), dates_file)
            logger.info('Saving to file...')
    logger.info('Data is ready.')
    dates_string = []
    dates_day = []
    dates_path = join(data_path, 'parcel_info_dates.pkl')
    if isfile(dates_path):
        logger.info('Loading from file...')
        with open(dates_path, 'rb') as dates_file:
            dates_string, dates_day = pickle.load(dates_file)
    else:
        logger.info('Computing data')
        with open(parcel_info_path) as csv_file:
            csv_reader = csv.reader(csv_file)
            index = 0
            for row in csv_reader:
                dates_string.append(row[3])
                dates_day.append(
                    (jalali.Persian(row[3]).gregorian_datetime() -
                     datetime.datetime(1970, 1, 1).date()).days)
                index += 1
            for i in range(len(dates_day)):
                days = dates_day[i]
                date1 = jalali.Gregorian(
                    (datetime.datetime(1970, 1, 1) +
                     datetime.timedelta(days=days)).strftime(
                         '%Y-%m-%d')).persian_string('{:04d}/{:02d}/{:02d}')
                date2 = dates_string[i]
                assert date1 == date2
            with open(dates_path, 'wb') as dates_file:
                pickle.dump((dates_string, dates_day), dates_file)
            logger.info('Saving to file...')
    logger.info('Data is ready.')

    dates_string.sort()
    size = len(dates_string)
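
    # The assert loops above verify a round trip: a Jalali date string is
    # converted to days since the Unix epoch and back, and must reproduce the
    # original string. A standalone sketch of that check, assuming the same
    # jalali module used throughout; the sample date is a placeholder.
    sample = '1396/11/21'
    sample_days = (jalali.Persian(sample).gregorian_datetime() -
                   datetime.datetime(1970, 1, 1).date()).days
    sample_back = jalali.Gregorian(
        (datetime.datetime(1970, 1, 1) +
         datetime.timedelta(days=sample_days)).strftime(
             '%Y-%m-%d')).persian_string('{:04d}/{:02d}/{:02d}')
    assert sample_back == sample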