Example #1
def run(batch_id,
        source_file_name,
        output_file_name,
        source_operating_hours,
        reference_datetime=today_datetime):
    data_gen = DataGenerator()

    # load source file
    data_gen.load_source_file(source_file_name)

    data_gen.add_constant_column('CreatedDate__c',
                                 reference_datetime.isoformat(sep=' '))

    operating_hours = data_gen.load_dataset("OperatingHours",
                                            source_operating_hours,
                                            ['Id', 'External_ID__c']).dict(
                                                'Id', 'External_ID__c')

    data_gen.add_map_column('OperatingHours.External_Id__c',
                            'OperatingHoursId', operating_hours)

    data_gen.apply_transformations()

    data_gen.write(output_file_name,
                   columns=[
                       'External_ID__c', 'OperatingHours.External_Id__c',
                       'StartTime', 'EndTime'
                   ])
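A minimal standalone sketch (plain dicts, illustrative ids) of the map step above, assuming add_map_column performs a straight dictionary lookup from the source column into the new column:

# Illustrative stand-in for load_dataset(...).dict('Id', 'External_ID__c')
operating_hours = {'0OH000001': 'OH.1', '0OH000002': 'OH.2'}

rows = [{'OperatingHoursId': '0OH000001'}, {'OperatingHoursId': '0OH000002'}]
for row in rows:
    # look the internal id up and store the external reference
    row['OperatingHours.External_Id__c'] = operating_hours[row['OperatingHoursId']]
print(rows)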
Example #2
def run(batch_id, source_file_name, output_file_name, filter_function=None):
    data_gen = DataGenerator()

    # load source file
    data_gen.load_source_file(source_file_name)

    data_gen.add_formula_column(
        'Contact.External_Id__c',
        lambda cv: cv['Account.External_Id__c'].replace(
            'W_Account', 'W_Contact'))

    # add a UUID for each row that is created in this batch
    data_gen.add_constant_column('analyticsdemo_batch_id__c', batch_id)

    data_gen.apply_transformations()

    if filter_function:
        data_gen.filter(filter_function)

    output_columns = [
        'External_Id__c', 'Owner.External_Id__c', 'Account.External_Id__c',
        'Contact.External_Id__c', 'CreatedDate__c', 'ClosedDate__c',
        'LastActivityDate__c', 'Origin', 'Tier', 'Product_Family_KB__c',
        'Priority', 'SLA', 'Reason', 'Type_of_Support__c', 'CSAT__c', 'Status',
        'First_Contact_Close__c', 'Time_Open__c', 'Team__c',
        'close_date_offset', 'Offer_Voucher__c', 'Send_FieldService__c',
        'IsEscalated', 'MilestoneStatus__c', 'analyticsdemo_batch_id__c'
    ]
    data_gen.write(output_file_name, output_columns)
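The contact id derivation above is a plain string substitution; a standalone check with an illustrative id:

account_id = 'W_Account.42'
contact_id = account_id.replace('W_Account', 'W_Contact')
print(contact_id)  # W_Contact.42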
Example #3
def run(batch_id, source_file_name, output_file_name):
    data_gen = DataGenerator()

    # load source file
    source_columns = [
        'External_Id__c', 'AccountExternalId__c', 'Owner.External_Id__c',
        'Name', 'Amount', 'StageName', 'LeadSource', 'Type',
        'ForecastCategoryName', 'CloseDate', 'CreatedDate__c',
        'RecordType.DeveloperName', 'LastActivityDate__c', 'Product2Name__c',
        'Product2Family__c', 'Region__c', 'TimeToClose__c',
        'SalesStageCount__c', 'AccountAnnualRevenue__c',
        'AccountNumberOfEmployees__c', 'AccountBookings__c', 'Competitor__c',
        'DealSizeCategory__c', 'Exec_Meeting__c', 'Interactive_Demo__c'
    ]
    data_gen.load_source_file(source_file_name, source_columns)

    data_gen.rename_column('AccountExternalId__c', 'Account.External_Id__c')
    data_gen.rename_column('CreatedDate__c', 'DateTimeCreated__c')

    data_gen.add_formula_column(
        'LastModifiedDate__c',
        lambda cv: dateutil.parser.parse(cv['LastActivityDate__c']))

    # add a UUID for each row that is created in this batch
    data_gen.add_constant_column('analyticsdemo_batch_id__c', batch_id)

    data_gen.apply_transformations()

    data_gen.write(output_file_name)
Example #4
    def generate(self, selected_filters=None, columns=None, count=5):
        if selected_filters is None:
            selected_filters = {}
        if columns is None:
            columns = self.get_columns()

        data_gen = DataGenerator()
        data_gen.row_count = count

        if 'gender' in selected_filters:
            if selected_filters['gender'] == 'male':
                data_gen.add_constant_column('Gender', 'Male')
            else:
                data_gen.add_constant_column('Gender', 'Female')
        else:
            data_gen.add_formula_column('Gender', formula=fake.gender)

        def first_name_formula(column_values):
            if column_values['Gender'] == 'Male':
                return fake.first_name_male()
            else:
                return fake.first_name_female()

        data_gen.add_formula_column('First Name', first_name_formula)

        data_gen.add_formula_column('Last Name', formula=fake.last_name)
        data_gen.add_formula_column('Name', lambda cv: cv['First Name'] + ' ' + cv['Last Name'])

        data_gen.apply_transformations()
        return [data_gen.row_to_column_values(r, columns).values()
                for r in data_gen.rows]
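A standalone sketch of the gender-aware naming above, assuming `fake` is a Faker instance (Faker does expose first_name_male and first_name_female):

from faker import Faker

fake = Faker()
gender = 'Male'  # illustrative; the generator draws this per row
first = fake.first_name_male() if gender == 'Male' else fake.first_name_female()
print(first, fake.last_name())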
Example #5
def run(batch_id, source_file_name, output_file_name, reference_date=today):
    data_gen = DataGenerator()

    # load source file
    source_columns = ['External_Id__c', 'Name', 'UserRole.Name']
    data_gen.load_source_file(source_file_name, source_columns)

    data_gen.filter(lambda cv: 'RVP' not in cv['UserRole.Name'])
    data_gen.filter(
        lambda cv: 'CSM' not in cv['UserRole.Name'])  # comes from Service

    data_gen.rename_column('External_Id__c', 'QuotaOwner_Id__c')
    data_gen.rename_column('Name', 'OwnerName__c')

    # generate id
    data_gen.add_formula_column(
        'External_Id__c',
        formula=lambda: 'W_Quota.' + str(data_gen.current_row + 1))

    data_gen.duplicate_rows(24)

    def quota_formula():
        # first month of quarter = 300k
        # second month of quarter = 750k
        # third month of quarter = 500k
        month_of_quarter = data_gen.current_row % 3
        if month_of_quarter == 0:
            return 300000
        elif month_of_quarter == 1:
            return 750000
        else:
            return 500000

    data_gen.add_formula_column('QuotaAmount__c', quota_formula)

    current_year = reference_date.year
    last_year = current_year - 1

    def start_date_formula():
        user_row = data_gen.current_row % 24
        month = str((user_row % 12) + 1).zfill(2)
        day = '01'
        if user_row < 12:
            year = str(last_year)
        else:
            year = str(current_year)
        return dateutil.parser.parse(year + '-' + month + '-' + day).date()

    data_gen.add_formula_column('StartDate__c', start_date_formula)

    # add a UUID for each row that is created in this batch
    data_gen.add_constant_column('analyticsdemo_batch_id__c', batch_id)

    # apply transformations and write file
    data_gen.apply_transformations()
    data_gen.write(output_file_name, [
        'External_Id__c', 'QuotaOwner_Id__c', 'OwnerName__c', 'StartDate__c',
        'QuotaAmount__c'
    ])
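A standalone check of the 24-row quota layout above: months 1-12 of last year, then 1-12 of the current year, with amounts cycling 300k/750k/500k inside each quarter:

import datetime

current_year = datetime.date.today().year
for row in range(6):  # first six of a user's 24 rows
    month = (row % 12) + 1
    year = current_year - 1 if row < 12 else current_year
    amount = [300000, 750000, 500000][row % 3]
    print(datetime.date(year, month, 1), amount)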
Example #6
def run(batch_id, source_file_name, output_file_name, products_file_name, pricebook_file_name):
    data_gen = DataGenerator()

    # load source file
    source_columns = ['External_Id__c', 'Product2Name__c', 'Amount']
    data_gen.load_source_file(source_file_name, source_columns)

    # load datasets
    products = data_gen.load_dataset('products', products_file_name)
    products_by_name = products.group_by('Name')

    pricebook = data_gen.load_dataset('pricebook', pricebook_file_name)
    pricebook_by_product = pricebook.group_by('Product2.External_Id__c')

    # rename columns
    data_gen.rename_column('External_Id__c', 'Opportunity.External_Id__c')
    data_gen.rename_column('Amount', 'TotalPrice')

    data_gen.add_formula_column('External_Id__c', formula=lambda: 'W_OpportunityLineItem.' + str(data_gen.current_row + 1))

    # transform product name to code
    data_gen.add_formula_column('ProductCode', lambda cv: products_by_name[cv['Product2Name__c']][0]['ProductCode'])

    # generate product reference id
    data_gen.add_formula_column('Product2.External_Id__c',
                                lambda cv: products_by_name[cv['Product2Name__c']][0]['External_Id__c'])

    # generate list price
    data_gen.add_formula_column('ListPrice', lambda cv: pricebook_by_product[cv['ProductCode']][0]['UnitPrice'])

    # generate pricebook reference id
    data_gen.add_formula_column('PricebookEntry.External_Id__c',
                                lambda cv: pricebook_by_product[cv['ProductCode']][0]['External_Id__c'])

    # generate quantity
    def quantity_formula(column_values):
        total_price = int(column_values['TotalPrice'])
        list_price = int(column_values['ListPrice'])
        quantity = total_price / list_price
        if quantity <= 0:
            quantity = 1
        return ceil(quantity)
    data_gen.add_formula_column('Quantity', quantity_formula)

    # add a UUID for each row that is created in this batch
    data_gen.add_constant_column('analyticsdemo_batch_id__c', batch_id)

    # apply transformations and write file
    data_gen.apply_transformations()

    output_columns = [
        'External_Id__c',
        'Opportunity.External_Id__c',
        'TotalPrice',
        'PricebookEntry.External_Id__c',
        'Quantity',
        'analyticsdemo_batch_id__c'
    ]
    data_gen.write(output_file_name, output_columns)
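The quantity rule above in isolation (illustrative prices): the rounded-up ratio of total to list price, floored at one unit:

from math import ceil

def quantity(total_price, list_price):
    q = total_price / list_price
    return ceil(q) if q > 0 else 1

print(quantity(2500, 400))  # ceil(6.25) -> 7
print(quantity(0, 400))     # floored    -> 1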
Example #7
def run(batch_id,
        source_file_name,
        output_file_name,
        reference_datetime=today_datetime,
        id_offset=0):

    data_gen = DataGenerator()

    # load source file
    source_columns = [
        'External_Id__c',
        'Owner.External_Id__c',
        'CreatedDate__c',
        'LastActivityDate__c'
    ]

    data_gen.load_source_file(source_file_name, source_columns)

    data_gen.rename_column('External_Id__c', 'What.External_Id__c')
    data_gen.rename_column('LastActivityDate__c', 'ActivityDate')

    # generate a random number of tasks per opportunity
    data_gen.duplicate_rows(duplication_factor=lambda: randint(1, 3))

    data_gen.add_formula_column('External_Id__c', formula=lambda: 'W_Task.' + str(id_offset + data_gen.current_row + 1))

    data_gen.add_formula_column('TaskSubtype', formula=task.oppty_task_subtype)
    data_gen.add_formula_column('CallDurationInSeconds', formula=task.task_call_duration)
    data_gen.add_formula_column('CallDisposition', formula=task.task_call_disposition)
    data_gen.add_formula_column('CallType', formula=task.task_call_type)
    data_gen.add_formula_column('Status', formula=task.task_status)
    data_gen.add_formula_column('Priority', formula=task.task_priority)

    def create_date_formula(column_values):
        oppty_create_date = dateutil.parser.parse(column_values['CreatedDate__c'])
        oppty_last_activity_date = dateutil.parser.parse(column_values['ActivityDate'])
        create_date = fake.date_time_between_dates(oppty_create_date, oppty_last_activity_date)
        if create_date > reference_datetime:
            create_date = reference_datetime
        return create_date.isoformat(sep=' ')
    
    data_gen.add_formula_column('CreatedDate__c', create_date_formula)

    def activity_date_formula(column_values):
        create_date = dateutil.parser.parse(column_values['CreatedDate__c']).date()
        return (create_date + timedelta(days=randint(0, 14))).isoformat()
    
    data_gen.add_formula_column('ActivityDate', activity_date_formula)

    data_gen.add_formula_column('Subject', formula=task.task_subject)

    # add a UUID for each row that is created in this batch
    data_gen.add_constant_column('analyticsdemo_batch_id__c', batch_id)

    # apply transformations and write
    data_gen.apply_transformations()
    data_gen.write(output_file_name)
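A standalone sketch of the date chaining above, using Faker's real date_time_between_dates helper (window bounds are illustrative): the task is created somewhere inside the opportunity's activity window, and its activity date follows within two weeks.

from datetime import datetime, timedelta
from random import randint
from faker import Faker

fake = Faker()
window_start = datetime(2023, 1, 5)
window_end = datetime(2023, 3, 1)
created = fake.date_time_between_dates(window_start, window_end)
activity = (created + timedelta(days=randint(0, 14))).date()
print(created.isoformat(sep=' '), activity.isoformat())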
Example #8
def run(input_path, output_path, config_source):
    configs = json.loads(file_to_string(config_source))
    time_shifting_file = configs.get('timeShiftingPivot').get('fileName')
    time_shifting_field = configs.get('timeShiftingPivot').get('fieldName')

    data_gen = DataGenerator()

    data_gen.load_source_file(input_path + time_shifting_file,
                              time_shifting_field)
    if time_shifting_field != 'LastProcessedDate':
        aux_date = max([x[0] for x in data_gen.rows])[:10]
    else:
        # LastProcessedDate is a constant column, do not iterate through all rows
        aux_date = data_gen.rows[0][0][:10]

    delta_to_increase = (today -
                         datetime.strptime(aux_date, "%Y-%m-%d").date()).days

    def aux_date_formula(dateToShift):
        def date_formula(column_values):
            if column_values[dateToShift] != "":
                create_date = dateutil.parser.parse(column_values[dateToShift])
                if len(column_values[dateToShift]) == 19:
                    return (create_date + timedelta(days=delta_to_increase)
                            ).strftime('%Y-%m-%d %H:%M:%S')
                elif len(column_values[dateToShift]) < 24:
                    return (
                        create_date +
                        timedelta(days=delta_to_increase)).strftime('%Y-%m-%d')
                else:
                    return (create_date + timedelta(days=delta_to_increase)
                            ).strftime('%Y-%m-%dT%H:%M:%S.000Z')

        data_gen.add_formula_column(dateToShift, date_formula)

    if not output_path:
        output_path = 'output/'

    for input_file in configs.get('inputFiles'):

        file_name = input_file.get('fileName')
        date_fields = input_file.get('dateFields', [])
        print("Timeshifting process for ", file_name, " will start ...")
        data_gen.load_source_file(input_path + file_name)

        data_gen.add_constant_column('LastProcessedDate', today.isoformat())

        for dateToShift in date_fields:
            aux_date_formula(dateToShift)

        data_gen.apply_transformations()
        data_gen.write(output_path + file_name)
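A standalone sketch of the shifting rule above: the delta is the day count from the newest pivot date to today, and each value is re-emitted in the format its length implies (19 chars = 'YYYY-MM-DD HH:MM:SS', under 24 = date only, otherwise ISO-8601 with milliseconds).

from datetime import date, datetime, timedelta

pivot = '2021-06-30'  # illustrative newest date in the pivot file
delta = (date.today() - datetime.strptime(pivot, '%Y-%m-%d').date()).days

value = '2021-06-28 09:15:00'  # 19 characters -> datetime format
shifted = datetime.strptime(value, '%Y-%m-%d %H:%M:%S') + timedelta(days=delta)
print(shifted.strftime('%Y-%m-%d %H:%M:%S'))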
Example #9
def run(batch_id, source_file_name, output_file_name):
    data_gen = DataGenerator()

    # load source file
    source_columns = ['KnowledgeArticle.External_Id__c', 'CreatedDate__c']
    data_gen.load_source_file(source_file_name, source_columns)

    data_gen.rename_column('KnowledgeArticle.External_Id__c', 'External_Id__c')

    data_gen.add_formula_column('ArticleNumber__c',
                                lambda: data_gen.current_row + 1)

    data_gen.add_formula_column('CaseAssociationCount__c',
                                lambda: randint(1, 6))

    def first_published_date_formula(column_values):
        create_date = dateutil.parser.parse(column_values['CreatedDate__c'])
        return (create_date +
                timedelta(days=randint(1, 10))).isoformat(sep=' ')

    data_gen.add_formula_column('FirstPublishedDate__c',
                                formula=first_published_date_formula)

    def last_published_date_formula(column_values):
        first_published_date = dateutil.parser.parse(
            column_values['FirstPublishedDate__c'])
        return (first_published_date +
                timedelta(days=randint(1, 10))).isoformat(sep=' ')

    data_gen.add_formula_column('LastPublishedDate__c',
                                formula=last_published_date_formula)

    # add a UUID for each row that is created in this batch
    data_gen.add_constant_column('analyticsdemo_batch_id__c', batch_id)

    # apply transformations and write file
    data_gen.apply_transformations()

    output_columns = [
        'ArticleNumber__c', 'External_Id__c', 'CaseAssociationCount__c',
        'CreatedDate__c', 'FirstPublishedDate__c', 'LastPublishedDate__c',
        'analyticsdemo_batch_id__c'
    ]
    data_gen.write(output_file_name, output_columns)
Example #10
def run(batch_id, source_file_name, output_file_name):
    data_gen = DataGenerator()

    # load source file
    source_columns = ['External_Id__c', 'UserRole.Name']
    data_gen.load_source_file(source_file_name, source_columns)

    # data_gen.filter(lambda cv: 'RVP' in cv['UserRole.Name']) # commented out because using shape file from service with no RVP value in UserRole.Name
    data_gen.filter(lambda cv: 'CSM' in cv['UserRole.Name']) # comes from Service

    data_gen.rename_column('External_Id__c', 'ForecastUser.External_Id__c')

    data_gen.rename_column('UserRole.Name', 'Name')

    # add a UUID for each row that is created in this batch
    data_gen.add_constant_column('analyticsdemo_batch_id__c', batch_id)

    # apply transformations and write file
    data_gen.apply_transformations()
    data_gen.write(output_file_name, ['Name', 'ForecastUser.External_Id__c', 'analyticsdemo_batch_id__c'])
Example #11
def run(batch_id, source_file_name, output_file_name, source_products):
    data_gen = DataGenerator()

    # load source file
    data_gen.load_source_file(source_file_name)

    products = data_gen.load_dataset("Products", source_products,
                                     ['Id', 'External_ID__c']).dict(
                                         'Id', 'External_ID__c')
    data_gen.add_map_column('Product2.External_Id__c', 'Product2Id', products)

    data_gen.add_constant_column('Pricebook2.Name', 'Standard Price Book')

    data_gen.apply_transformations()

    data_gen.write(output_file_name,
                   columns=[
                       'External_Id__c', 'Product2.External_Id__c', 'IsActive',
                       'Pricebook2.Name', 'UnitPrice'
                   ])
Example #12
def run(batch_id, source_file_name, output_file_name):
    data_gen = DataGenerator()

    # load source file
    account_columns = ['External_Id__c']
    data_gen.load_source_file(source_file_name, account_columns)

    data_gen.rename_column('External_Id__c', 'Account.External_Id__c')
    data_gen.add_formula_column(
        'External_Id__c', lambda cv: cv['Account.External_Id__c'].replace(
            'W_Account', 'W_Contact'))

    data_gen.add_formula_column('FirstName', formula=fake.first_name)
    data_gen.add_formula_column('LastName', formula=fake.last_name)

    # add a UUID for each row that is created in this batch
    data_gen.add_constant_column('analyticsdemo_batch_id__c', batch_id)

    data_gen.apply_transformations()
    data_gen.write(output_file_name)
Example #13
def run(batch_id, source_file_name, output_file_name):
    data_gen = DataGenerator()

    # load source file
    source_columns = ['KnowledgeArticle.External_Id__c']
    data_gen.load_source_file(source_file_name, source_columns)

    data_gen.rename_column('KnowledgeArticle.External_Id__c',
                           'Parent.External_Id__c')

    data_gen.add_formula_column(
        'External_Id__c',
        formula=lambda: 'W_KCSArticle_DCS.' + str(data_gen.current_row + 1))

    data_gen.add_constant_column('DataCategoryGroupName__c', 'All')

    data_gen.add_constant_column('DataCategoryName__c', 'All')

    # add a UUID for each row that is created in this batch
    data_gen.add_constant_column('analyticsdemo_batch_id__c', batch_id)

    # apply transformations and write file
    data_gen.apply_transformations()

    output_columns = [
        'External_Id__c', 'Parent.External_Id__c', 'DataCategoryGroupName__c',
        'DataCategoryName__c', 'analyticsdemo_batch_id__c'
    ]
    data_gen.write(output_file_name, output_columns)
Example #14
def run(batch_id, source_file_name, output_file_name):
    data_gen = DataGenerator()

    # load source file
    source_columns = [
        'KnowledgeArticle.External_Id__c',
        'User.External_Id__c',
        'CreatedDate__c'
    ]
    data_gen.load_source_file(source_file_name, source_columns)

    data_gen.rename_column('KnowledgeArticle.External_Id__c', 'Parent.External_Id__c')
    data_gen.rename_column('User.External_Id__c', 'Owner.External_Id__c')

    data_gen.add_formula_column('External_Id__c', formula=lambda: 'W_KCSArticle_ViewStat.' + str(data_gen.current_row + 1))

    channels = [
        'App',
        'Desktop Site',
        'Mobile Site'
    ]
    data_gen.add_formula_column('Channel__c', channels)

    data_gen.add_formula_column('ViewCount__c', formula=lambda: randint(1, 100))

    data_gen.add_formula_column('NormalizedScore__c', formula=lambda: round(uniform(1, 10), 3))
    # add a UUID for each row that is created in this batch
    data_gen.add_constant_column('analyticsdemo_batch_id__c', batch_id)
    # apply transformations and write file
    data_gen.apply_transformations()

    output_columns = [
        'External_Id__c',
        'Channel__c',
        'Parent.External_Id__c',
        'ViewCount__c',
        'NormalizedScore__c',
        'analyticsdemo_batch_id__c'
    ]
    data_gen.write(output_file_name, output_columns)
Example #15
def run(batch_id,
        source_file_name,
        output_file_name,
        source_cases,
        source_accounts,
        source_work_types,
        source_service_appointments,
        reference_datetime=today_datetime):
    data_gen = DataGenerator()

    # load source file
    data_gen.load_source_file(source_file_name)

    cases = data_gen.load_dataset("Cases", source_cases, ['Id', 'External_ID__c']).dict('Id', 'External_ID__c')
    data_gen.add_map_column('Case.External_Id__c', 'CaseId', cases)

    accounts = data_gen.load_dataset("Accounts", source_accounts, ['Id', 'External_ID__c']).dict('Id', 'External_ID__c')
    data_gen.add_map_column('Account.External_Id__c', 'AccountId', accounts)

    work_types = data_gen.load_dataset("WorkTypes", source_work_types, ['Id', 'External_ID__c']).dict('Id', 'External_ID__c')
    data_gen.add_map_column('WorkType.External_Id__c', 'WorkTypeId', work_types)

    data_gen.add_constant_column('Pricebook2.Name', 'Standard Price Book')

    service_appointment_dates = data_gen.load_dataset(
        "ServiceAppointmentDates", source_service_appointments,
        ['WorkOrder.External_Id__c', 'CreatedDate__c']).dict(
            'WorkOrder.External_Id__c', 'CreatedDate__c')
    service_appointment_dates[None] = reference_datetime + timedelta(days=-1)
    data_gen.add_map_column('CreatedDate__c', 'External_ID__c', service_appointment_dates)

    data_gen.apply_transformations()

    data_gen.filter(lambda cv: cv['WorkType.External_Id__c'].startswith('WT.'))

    data_gen.apply_transformations()

    data_gen.write(output_file_name, columns=[
        'External_ID__c',
        'CreatedDate__c',
        'Status',
        'Pricebook2.Name',
        'Priority',
        'Case.External_Id__c',
        'Account.External_Id__c',
        'WorkType.External_Id__c'
    ])
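The None entry above acts as a fallback: work orders with no matching service appointment pick up the day-before-reference timestamp. A standalone sketch, assuming add_map_column resolves unmatched keys through the None entry:

from datetime import datetime, timedelta

reference = datetime(2023, 5, 9)
dates = {'WO.1': '2023-05-02 10:00:00'}      # from the appointments dataset
dates[None] = reference - timedelta(days=1)  # fallback entry
for work_order in ['WO.1', 'WO.2']:
    print(work_order, dates.get(work_order, dates[None]))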
Example #16
def run(batch_id, source_file_name, output_file_name):
    data_gen = DataGenerator()

    # load source file
    source_columns = [
        'External_Id__c', 'Owner.External_Id__c', 'CreatedDate__c'
    ]
    data_gen.load_source_file(source_file_name, source_columns)

    data_gen.rename_column('External_Id__c', 'Case.External_Id__c')
    data_gen.rename_column('Owner.External_Id__c', 'User.External_Id__c')

    # todo one case article per case? at most 1? distribution?
    data_gen.duplicate_rows(
        duplication_factor=lambda: choice([0, 1], p=[.75, .25]))

    data_gen.add_formula_column(
        'External_Id__c',
        formula=lambda: 'W_CaseArticle.' + str(data_gen.current_row + 1))
    data_gen.add_formula_column(
        'KnowledgeArticle.External_Id__c',
        formula=lambda: 'W_KCSArticle.' + str(data_gen.current_row + 1))

    data_gen.add_constant_column('ArticleVersionNumber__c', 1)

    data_gen.add_formula_column('IsSharedByEmail__c', ['true', 'false'])

    # add a UUID for each row that is created in this batch
    data_gen.add_constant_column('analyticsdemo_batch_id__c', batch_id)

    # apply transformations and write file
    data_gen.apply_transformations()

    output_columns = [
        'External_Id__c', 'User.External_Id__c', 'ArticleVersionNumber__c',
        'CreatedDate__c', 'KnowledgeArticle.External_Id__c',
        'IsSharedByEmail__c', 'Case.External_Id__c',
        'analyticsdemo_batch_id__c'
    ]
    data_gen.write(output_file_name, output_columns)
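A standalone sketch of the duplication rule above: each case is emitted zero or one time, so roughly a quarter of cases get a case-article row (NumPy's choice is assumed, as the p= keyword in the surrounding code implies).

from numpy.random import choice

cases = ['case-%d' % i for i in range(8)]
articles = [c for c in cases for _ in range(choice([0, 1], p=[.75, .25]))]
print(len(articles), articles)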
Example #17
def run(batch_id,
        source_file_name,
        output_file_name,
        reference_datetime=today_datetime):
    data_gen = DataGenerator()

    # load source file
    data_gen.load_source_file(source_file_name)

    data_gen.rename_column('External_Id__c', 'Case.External_Id__c')
    data_gen.rename_column('Owner.External_Id__c', 'User.External_Id__c')

    data_gen.duplicate_rows(duplication_factor=lambda: choice(
        [1, 2, 3, 4, 5], p=[.65, .15, .10, .05, .05]))

    data_gen.add_formula_column(
        'External_Id__c',
        formula=lambda: 'W_AgentWork.' + str(data_gen.current_row + 1))

    data_gen.add_copy_column('RequestDateTime__c', 'CreatedDate__c')

    def created_date_formula(column_values):
        created_date = dateutil.parser.parse(column_values['CreatedDate__c'])
        closed_date = dateutil.parser.parse(column_values['ClosedDate__c'])
        if closed_date > reference_datetime:
            closed_date = reference_datetime
        mid_date = created_date + (closed_date - created_date) / 2
        return fake.date_time_between_dates(created_date,
                                            mid_date).isoformat(sep=' ')

    data_gen.add_formula_column('CreatedDate__c', created_date_formula)

    def assigned_date_formula(column_values):
        created_date = dateutil.parser.parse(column_values['CreatedDate__c'])
        return (created_date +
                timedelta(seconds=randint(0, 120))).isoformat(sep=' ')

    data_gen.add_formula_column('AssignedDateTime__c', assigned_date_formula)

    def accept_date_formula(column_values):
        assigned_date = dateutil.parser.parse(
            column_values['AssignedDateTime__c'])
        return (assigned_date +
                timedelta(seconds=randint(30, 600))).isoformat(sep=' ')

    data_gen.add_formula_column('AcceptDateTime__c', accept_date_formula)

    def close_date_formula(column_values):
        accept_date = dateutil.parser.parse(column_values['AcceptDateTime__c'])
        return (accept_date +
                timedelta(seconds=randint(30, 1800))).isoformat(sep=' ')

    data_gen.add_formula_column('CloseDateTime__c', close_date_formula)

    def active_time_formula(column_values):
        accept_date = dateutil.parser.parse(column_values['AcceptDateTime__c'])
        close_date = dateutil.parser.parse(column_values['CloseDateTime__c'])
        return int((close_date - accept_date).total_seconds())

    data_gen.add_formula_column('ActiveTime__c', active_time_formula)

    data_gen.add_formula_column('AgentCapacityWhenDeclined__c',
                                lambda: randint(30, 1800))

    def cancel_date_formula(column_values):
        assigned_date = dateutil.parser.parse(
            column_values['AssignedDateTime__c'])
        return (assigned_date +
                timedelta(seconds=randint(30, 600))).isoformat(sep=' ')

    data_gen.add_formula_column('CancelDateTime__c', cancel_date_formula)

    data_gen.add_formula_column('CapacityPercentage__c',
                                lambda: randint(1, 101))

    data_gen.add_formula_column('CapacityWeight__c', lambda: randint(1, 7))

    def decline_date_formula(column_values):
        assigned_date = dateutil.parser.parse(
            column_values['AssignedDateTime__c'])
        return (assigned_date +
                timedelta(seconds=randint(30, 600))).isoformat(sep=' ')

    data_gen.add_formula_column('DeclineDateTime__c', decline_date_formula)

    data_gen.add_formula_column('DeclineReason__c', formula=fake.sentence)

    data_gen.add_copy_column('HandleTime__c', 'ActiveTime__c')

    data_gen.add_formula_column('OriginalQueue.DeveloperName', [
        'GeneralQueue', 'InternationalQueue', 'Knowledge_Translations',
        'Social_Queue', 'TargetCampaign', 'Tier1Queue', 'Tier2Queue',
        'Tier3Queue'
    ])

    data_gen.add_formula_column('PushTimeout__c', lambda: randint(0, 100))

    def push_timeout_date_formula(column_values):
        create_date = dateutil.parser.parse(column_values['CreatedDate__c'])
        return create_date + timedelta(seconds=column_values['PushTimeout__c'])

    data_gen.add_formula_column('PushTimeoutDateTime__c',
                                push_timeout_date_formula)

    data_gen.add_formula_column(
        'ServiceChannel.DeveloperName',
        ['Cases', 'LiveMessage', 'sfdc_liveagent', 'Leads'])

    def speed_to_answer_formula(column_values):
        request_date = dateutil.parser.parse(
            column_values['RequestDateTime__c'])
        accept_date = dateutil.parser.parse(column_values['AcceptDateTime__c'])
        return int((accept_date - request_date).total_seconds())

    data_gen.add_formula_column('SpeedToAnswer__c', speed_to_answer_formula)

    data_gen.add_formula_column('Status__c', [
        'Assigned', 'Unavailable', 'Declined', 'Opened', 'Closed',
        'DeclinedOnPushTimeout', 'Canceled'
    ])

    # add a UUID for each row that is created in this batch
    data_gen.add_constant_column('analyticsdemo_batch_id__c', batch_id)

    def filter_func(column_values):
        created_date = dateutil.parser.parse(column_values['CreatedDate__c'])
        cutoff_date = reference_datetime - timedelta(days=60)
        return column_values['Origin'] == 'Chat' and created_date >= cutoff_date

    data_gen.filter(filter_function=filter_func)

    data_gen.apply_transformations()

    data_gen.sort_by('RequestDateTime__c')

    output_columns = [
        'External_Id__c', 'RequestDateTime__c', 'CreatedDate__c',
        'AssignedDateTime__c', 'AcceptDateTime__c', 'CloseDateTime__c',
        'ActiveTime__c', 'AgentCapacityWhenDeclined__c', 'CancelDateTime__c',
        'CapacityPercentage__c', 'CapacityWeight__c', 'DeclineDateTime__c',
        'DeclineReason__c', 'HandleTime__c', 'OriginalQueue.DeveloperName',
        'PushTimeout__c', 'PushTimeoutDateTime__c',
        'ServiceChannel.DeveloperName', 'SpeedToAnswer__c', 'Status__c',
        'User.External_Id__c', 'Case.External_Id__c',
        'analyticsdemo_batch_id__c'
    ]
    return data_gen.write(output_file_name, output_columns, 6000)
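The timing chain above, walked through standalone (illustrative start time): assignment follows creation by up to two minutes, acceptance by 30-600 s, close by 30-1800 s; ActiveTime__c and SpeedToAnswer__c are the resulting differences in seconds.

from datetime import datetime, timedelta
from random import randint

requested = created = datetime(2023, 4, 1, 9, 0, 0)
assigned = created + timedelta(seconds=randint(0, 120))
accepted = assigned + timedelta(seconds=randint(30, 600))
closed = accepted + timedelta(seconds=randint(30, 1800))
print('ActiveTime__c:', int((closed - accepted).total_seconds()))
print('SpeedToAnswer__c:', int((accepted - requested).total_seconds()))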
Example #18
def run(input_path, output_path, config_source):
    configs = json.loads(file_to_string(config_source))
    time_shifting_file = configs.get('timeShiftingPivot').get('fileName')
    time_shifting_field = configs.get('timeShiftingPivot').get('fieldName')

    data_gen = DataGenerator()

    data_gen.load_source_file(input_path + time_shifting_file,
                              time_shifting_field)

    aux_date = max([x[0] for x in data_gen.rows])[:10]

    delta_to_increase = (today -
                         datetime.strptime(aux_date, "%Y-%m-%d").date()).days

    def aux_date_formula(dateToShift):
        def date_formula(column_values):
            if column_values[dateToShift] != "":
                create_date = dateutil.parser.parse(column_values[dateToShift])
                if len(column_values[dateToShift]) == 19:
                    return (create_date + timedelta(days=delta_to_increase)
                            ).strftime('%Y-%m-%d %H:%M:%S')
                elif len(column_values[dateToShift]) < 24:
                    return (
                        create_date +
                        timedelta(days=delta_to_increase)).strftime('%Y-%m-%d')
                else:
                    return (create_date + timedelta(days=delta_to_increase)
                            ).strftime('%Y-%m-%dT%H:%M:%S.000Z')

        data_gen.add_formula_column(dateToShift, date_formula)

    current_year = today.year
    map_quota_year = {}

    def quotas_date_formula(dateToShift):
        def date_formula(column_values):
            if column_values[dateToShift] != "":
                quota_year = column_values[dateToShift][:4]
                d = column_values[dateToShift].replace(
                    quota_year, map_quota_year[quota_year])
                return d

        date_index = data_gen.column_names[dateToShift]
        dates = [e[date_index] for e in data_gen.rows]
        max_year = max(dates)[:4]
        min_year = min(dates)[:4]
        map_quota_year[max_year] = str(current_year)
        map_quota_year[min_year] = str(current_year - 1)

        data_gen.add_formula_column(dateToShift, date_formula)

    if not output_path:
        output_path = 'output/'

    for input_file in configs.get('inputFiles'):

        file_name = input_file.get('fileName')
        date_fields = input_file.get('dateFields', [])
        print("Timeshifting process for ", file_name, " will start ...")
        data_gen.load_source_file(input_path + file_name)

        if file_name not in [
                'FscDemoWeeks.csv', 'WM_Add_Assets_Prediction_Final.csv',
                'WM_Churn_Predictions_Final.csv'
        ]:
            data_gen.add_constant_column('LastProcessedDate',
                                         today.isoformat())

        for dateToShift in date_fields:
            if file_name != 'FscDemoQuota.csv':
                aux_date_formula(dateToShift)
            else:
                quotas_date_formula(dateToShift)
        data_gen.apply_transformations()
        data_gen.write(output_path + file_name)
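A standalone check of the quota-year remap above: the newest year in the file maps to the current year, the oldest to last year, and each value is rewritten by substring replacement (illustrative dates).

dates = ['2018-03-01', '2019-01-01', '2019-06-01']
current_year = 2024
map_quota_year = {max(dates)[:4]: str(current_year),
                  min(dates)[:4]: str(current_year - 1)}
print([d.replace(d[:4], map_quota_year[d[:4]]) for d in dates])
# ['2023-03-01', '2024-01-01', '2024-06-01']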
Example #19
def run(batch_id, source_file_name, output_file_name, manager_output_file_name):
    data_gen = DataGenerator()

    # load source file
    source_columns = ['Owner.External_Id__c', 'Team__c']
    data_gen.load_source_file(source_file_name, source_columns)
    data_gen.unique()

    # rename columns
    data_gen.rename_column('Owner.External_Id__c', 'External_Id__c')
    data_gen.rename_column('Team__c', 'UserRole.Name')

    # add 3 manager users
    west_manager = ['W_User.M.' + str(len(data_gen.rows) + 1), 'West CSM']
    east_manager = ['W_User.M.' + str(len(data_gen.rows) + 2), 'East CSM']
    central_manager = ['W_User.M.' + str(len(data_gen.rows) + 3), 'Central CSM']
    ## managers from Sales ##
    # west_manager = ['RVP West', 'W_Sales_User.M.' + str(len(data_gen.rows) + 1)]
    # east_manager = ['RVP East', 'W_Sales_User.M.' + str(len(data_gen.rows) + 2)]
    # central_manager = ['RVP Central', 'W_Sales_User.M.' + str(len(data_gen.rows) + 3)]
    ########################

    data_gen.rows.append(west_manager)
    data_gen.rows.append(east_manager)
    data_gen.rows.append(central_manager)

    # generate company name
    data_gen.add_formula_column('CompanyName', formula=fake.company)

    # generate fake first and last name
    def first_name_formula(column_values):
        user_id = int(column_values['External_Id__c'].split('.')[-1])
        return fake.first_name_female() if user_id < 13 else fake.first_name_male()
    data_gen.add_formula_column('FirstName', formula=first_name_formula)
    data_gen.add_formula_column('LastName', formula=fake.last_name)

    # generate data based on fake first and last name
    data_gen.add_formula_column('Name', lambda cv: cv['FirstName'] + ' ' + cv['LastName'])

    # derive alias from first initial + last name, capped at 8 characters
    def alias_formula(column_values):
        alias = (column_values['FirstName'][0] + column_values['LastName']).lower()
        return alias[:8]
    data_gen.add_formula_column('Alias', formula=alias_formula)
    data_gen.add_formula_column('Username', lambda cv: cv['Alias'] + '@demo.user')
    data_gen.add_formula_column('CommunityNickname', lambda cv: cv['Alias'] + str(randint(100, 999)))
    data_gen.add_formula_column('Email', lambda cv: cv['Alias'] + '@webmail.com')

    data_gen.add_formula_column('Phone', formula=fake.phone_number)

    titles = ['Customer Service Representative', 'Senior Customer Service Representative']
    data_gen.add_formula_column('Title', lambda: choice(titles, p=[.70, .30]))

    # generate constant values
    data_gen.add_constant_column('IsActive', 'false')
    data_gen.add_constant_column('TimeZoneSidKey', 'America/Los_Angeles')
    data_gen.add_constant_column('Profile.Name', 'Standard User')
    # from oppty> data_gen.add_constant_column('Profile.Name', 'Standard User')
    data_gen.add_constant_column('LocaleSidKey', 'en_US')
    data_gen.add_constant_column('LanguageLocaleKey', 'en_US')
    data_gen.add_constant_column('EmailEncodingKey', 'ISO-8859-1')
    data_gen.add_constant_column('ForecastEnabled', 'true') # this comes from Sales

    data_gen.add_constant_column('UserPermissionsAvantgoUser', 'false')
    data_gen.add_constant_column('UserPermissionsCallCenterAutoLogin', 'false')
    data_gen.add_constant_column('UserPermissionsChatterAnswersUser', 'false')
    data_gen.add_constant_column('UserPermissionsInteractionUser', 'false')
    data_gen.add_constant_column('UserPermissionsJigsawProspectingUser', 'false')
    data_gen.add_constant_column('UserPermissionsKnowledgeUser', 'false')
    data_gen.add_constant_column('UserPermissionsLiveAgentUser', 'false')
    data_gen.add_constant_column('UserPermissionsMarketingUser', 'false')
    data_gen.add_constant_column('UserPermissionsMobileUser', 'false')
    data_gen.add_constant_column('UserPermissionsOfflineUser', 'false')
    data_gen.add_constant_column('UserPermissionsSFContentUser', 'false')
    data_gen.add_constant_column('UserPermissionsSiteforceContributorUser', 'false')
    data_gen.add_constant_column('UserPermissionsSiteforcePublisherUser', 'false')
    data_gen.add_constant_column('UserPermissionsSupportUser', 'false')
    data_gen.add_constant_column('UserPermissionsWorkDotComUserFeature', 'false')
    data_gen.add_constant_column('UserPreferencesActivityRemindersPopup', 'false')
    data_gen.add_constant_column('UserPreferencesApexPagesDeveloperMode', 'false')
    data_gen.add_constant_column('UserPreferencesCacheDiagnostics', 'false')
    data_gen.add_constant_column('UserPreferencesContentEmailAsAndWhen', 'false')
    data_gen.add_constant_column('UserPreferencesContentNoEmail', 'false')
    data_gen.add_constant_column('UserPreferencesDisableAllFeedsEmail', 'false')
    data_gen.add_constant_column('UserPreferencesDisableBookmarkEmail', 'false')
    data_gen.add_constant_column('UserPreferencesDisableChangeCommentEmail', 'false')
    data_gen.add_constant_column('UserPreferencesDisableEndorsementEmail', 'false')
    data_gen.add_constant_column('UserPreferencesDisableFeedbackEmail', 'false')
    data_gen.add_constant_column('UserPreferencesDisableFileShareNotificationsForApi', 'false')
    data_gen.add_constant_column('UserPreferencesDisableFollowersEmail', 'false')
    data_gen.add_constant_column('UserPreferencesDisableLaterCommentEmail', 'false')
    data_gen.add_constant_column('UserPreferencesDisableLikeEmail', 'false')
    data_gen.add_constant_column('UserPreferencesDisableMentionsPostEmail', 'false')
    data_gen.add_constant_column('UserPreferencesDisableMessageEmail', 'false')
    data_gen.add_constant_column('UserPreferencesDisableProfilePostEmail', 'false')
    data_gen.add_constant_column('UserPreferencesDisableRewardEmail', 'false')
    data_gen.add_constant_column('UserPreferencesDisableSharePostEmail', 'false')
    data_gen.add_constant_column('UserPreferencesDisableWorkEmail', 'false')
    data_gen.add_constant_column('UserPreferencesDisCommentAfterLikeEmail', 'false')
    data_gen.add_constant_column('UserPreferencesDisMentionsCommentEmail', 'false')
    data_gen.add_constant_column('UserPreferencesDisProfPostCommentEmail', 'false')
    data_gen.add_constant_column('UserPreferencesEnableAutoSubForFeeds', 'false')
    data_gen.add_constant_column('UserPreferencesEventRemindersCheckboxDefault', 'false')
    data_gen.add_constant_column('UserPreferencesHideBiggerPhotoCallout', 'false')
    data_gen.add_constant_column('UserPreferencesHideChatterOnboardingSplash', 'false')
    data_gen.add_constant_column('UserPreferencesHideCSNDesktopTask', 'false')
    data_gen.add_constant_column('UserPreferencesHideCSNGetChatterMobileTask', 'false')
    data_gen.add_constant_column('UserPreferencesHideEndUserOnboardingAssistantModal', 'false')
    data_gen.add_constant_column('UserPreferencesHideLightningMigrationModal', 'false')
    data_gen.add_constant_column('UserPreferencesHideS1BrowserUI', 'false')
    data_gen.add_constant_column('UserPreferencesHideSecondChatterOnboardingSplash', 'false')
    data_gen.add_constant_column('UserPreferencesHideSfxWelcomeMat', 'false')
    data_gen.add_constant_column('UserPreferencesJigsawListUser', 'false')
    data_gen.add_constant_column('UserPreferencesLightningExperiencePreferred', 'false')
    data_gen.add_constant_column('UserPreferencesPathAssistantCollapsed', 'false')
    data_gen.add_constant_column('UserPreferencesPreviewLightning', 'false')
    data_gen.add_constant_column('UserPreferencesReminderSoundOff', 'false')
    data_gen.add_constant_column('UserPreferencesShowCityToExternalUsers', 'false')
    data_gen.add_constant_column('UserPreferencesShowCityToGuestUsers', 'false')
    data_gen.add_constant_column('UserPreferencesShowCountryToExternalUsers', 'false')
    data_gen.add_constant_column('UserPreferencesShowCountryToGuestUsers', 'false')
    data_gen.add_constant_column('UserPreferencesShowEmailToExternalUsers', 'false')
    data_gen.add_constant_column('UserPreferencesShowEmailToGuestUsers', 'false')
    data_gen.add_constant_column('UserPreferencesShowFaxToExternalUsers', 'false')
    data_gen.add_constant_column('UserPreferencesShowFaxToGuestUsers', 'false')
    data_gen.add_constant_column('UserPreferencesShowManagerToExternalUsers', 'false')
    data_gen.add_constant_column('UserPreferencesShowManagerToGuestUsers', 'false')
    data_gen.add_constant_column('UserPreferencesShowMobilePhoneToExternalUsers', 'false')
    data_gen.add_constant_column('UserPreferencesShowMobilePhoneToGuestUsers', 'false')
    data_gen.add_constant_column('UserPreferencesShowPostalCodeToExternalUsers', 'false')
    data_gen.add_constant_column('UserPreferencesShowPostalCodeToGuestUsers', 'false')
    data_gen.add_constant_column('UserPreferencesShowProfilePicToGuestUsers', 'false')
    data_gen.add_constant_column('UserPreferencesShowStateToExternalUsers', 'false')
    data_gen.add_constant_column('UserPreferencesShowStateToGuestUsers', 'false')
    data_gen.add_constant_column('UserPreferencesShowStreetAddressToExternalUsers', 'false')
    data_gen.add_constant_column('UserPreferencesShowStreetAddressToGuestUsers', 'false')
    data_gen.add_constant_column('UserPreferencesShowTitleToExternalUsers', 'false')
    data_gen.add_constant_column('UserPreferencesShowTitleToGuestUsers', 'false')
    data_gen.add_constant_column('UserPreferencesShowWorkPhoneToExternalUsers', 'false')
    data_gen.add_constant_column('UserPreferencesShowWorkPhoneToGuestUsers', 'false')
    data_gen.add_constant_column('UserPreferencesSortFeedByComment', 'false')
    data_gen.add_constant_column('UserPreferencesTaskRemindersCheckboxDefault', 'false')
    data_gen.add_constant_column('EmailPreferencesAutoBcc', 'false')
    data_gen.add_constant_column('EmailPreferencesAutoBccStayInTouch', 'false')
    data_gen.add_constant_column('EmailPreferencesStayInTouchReminder', 'false')
    data_gen.add_constant_column('UserPreferencesGlobalNavBarWTShown', 'false')
    data_gen.add_constant_column('UserPreferencesGlobalNavGridMenuWTShown', 'false')
    data_gen.add_constant_column('UserPreferencesCreateLEXAppsWTShown', 'false')

    # add a UUID for each row that is created in this batch
    data_gen.add_constant_column('analyticsdemo_batch_id__c', batch_id)

    # apply transformations and write file
    data_gen.apply_transformations()
    data_gen.write(output_file_name)

    # create manager file
    data_gen.filter(lambda cv: 'CSM' not in cv['UserRole.Name'])

    manager_map = {
        'West CSR': west_manager[0],
        'East CSR': east_manager[0],
        'Central CSR': central_manager[0]
    }
    ### this is the manager file section in Sales> ###
    # # create manager file
    # data_gen.filter(lambda cv: 'RVP' not in cv['UserRole.Name'])
    # manager_map = {
    #     'West Sales': west_manager[1],
    #     'East Sales': east_manager[1],
    #     'Central Sales': central_manager[1],
    # }
    ##################################################
    data_gen.add_map_column('Manager.External_Id__c', 'UserRole.Name', manager_map)

    data_gen.apply_transformations()
    data_gen.write(manager_output_file_name, ['External_Id__c', 'Manager.External_Id__c'])
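A standalone check of the alias derivation above (illustrative name): first initial plus last name, lowercased and capped at eight characters, then reused for the username and email.

first, last = 'Gabriella', 'Vanderbilt'
alias = (first[0] + last).lower()[:8]
print(alias)                   # gvanderb
print(alias + '@demo.user')    # username
print(alias + '@webmail.com')  # email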
Example #20
def run(batch_id,
        source_file_name,
        output_file_name,
        reference_datetime=today_datetime,
        id_offset=0):
    data_gen = DataGenerator()

    # load source file
    source_columns = [
        'External_Id__c', 'Owner.External_Id__c', 'CreatedDate__c',
        'LastActivityDate__c'
    ]
    data_gen.load_source_file(source_file_name, source_columns)

    data_gen.rename_column('External_Id__c', 'What.External_Id__c')
    data_gen.rename_column('LastActivityDate__c', 'ActivityDate')

    # generate a random number of events per opportunity
    data_gen.duplicate_rows(duplication_factor=lambda: randint(1, 3))

    data_gen.add_formula_column(
        'External_Id__c',
        formula=lambda: 'W_Event.' + str(id_offset + data_gen.current_row + 1))

    data_gen.add_formula_column('Subject', formula=event.event_subject)
    data_gen.add_formula_column('EventSubtype', formula=event.event_subtype)
    data_gen.add_formula_column('DurationInMinutes',
                                formula=event.event_call_duration)

    is_first = True

    def create_date_formula(column_values):
        oppty_create_date = dateutil.parser.parse(
            column_values['CreatedDate__c'])
        oppty_last_activity_date = dateutil.parser.parse(
            column_values['ActivityDate'])
        nonlocal is_first
        if is_first:
            create_date = oppty_last_activity_date
        else:
            create_date = fake.date_time_between_dates(
                oppty_create_date, oppty_last_activity_date)
        is_first = False
        if create_date > reference_datetime:
            create_date = reference_datetime
        return create_date.isoformat(sep=' ')

    data_gen.add_formula_column('CreatedDate__c', create_date_formula)

    def activity_date_formula(column_values):
        create_date = dateutil.parser.parse(
            column_values['CreatedDate__c']).date()
        return (create_date + timedelta(days=randint(0, 14))).isoformat()

    data_gen.add_formula_column('ActivityDate', activity_date_formula)

    def activity_datetime_formula(column_values):
        return dateutil.parser.parse(column_values['ActivityDate'])

    data_gen.add_formula_column('ActivityDateTime', activity_datetime_formula)

    # add a UUID for each row that is created in this batch
    data_gen.add_constant_column('analyticsdemo_batch_id__c', batch_id)

    # apply transformations and write
    data_gen.apply_transformations()
    data_gen.write(output_file_name)
Example #21
def run(batch_id, source_file_name, output_file_name):
    data_gen = DataGenerator()

    # load source file
    source_columns = [
        'KnowledgeArticle.External_Id__c', 'User.External_Id__c',
        'CreatedDate__c'
    ]
    data_gen.load_source_file(source_file_name, source_columns)

    data_gen.rename_column('KnowledgeArticle.External_Id__c',
                           'KCSArticle__ka.External_Id__c')
    data_gen.rename_column('User.External_Id__c', 'Owner.External_Id__c')

    data_gen.add_formula_column(
        'External_Id__c',
        formula=lambda: 'W_KCSArticleVersion.' + str(data_gen.current_row + 1))

    data_gen.add_formula_column('ArticleNumber__c',
                                lambda: data_gen.current_row + 1)

    data_gen.add_formula_column('PublishStatus__c', ['Archived', 'Online'])

    data_gen.add_constant_column('IsLatestVersion__c', 'true')
    data_gen.add_constant_column('IsVisibleInApp__c', 'true')
    data_gen.add_constant_column('IsVisibleInCsp__c', 'true')
    data_gen.add_constant_column('IsVisibleInPkb__c', 'true')
    data_gen.add_constant_column('IsVisibleInPrm__c', 'true')

    data_gen.add_constant_column('VersionNumber__c', '1')
    data_gen.add_constant_column('Language__c', 'en_US')

    titles = [
        "Health", "Computers", "Music", "Tools", "Home", "Outdoors",
        "Jewelery", "Toys", "Grocery", "Clothing", "Games", "Automotive",
        "Beauty", "Garden", "Books", "Industrial", "Baby", "Kids", "Movies",
        "Sports", "Shoes", "Electronics"
    ]
    data_gen.add_formula_column('Title__c', titles)

    # add a UUID for each row that is created in this batch
    data_gen.add_constant_column('analyticsdemo_batch_id__c', batch_id)

    # apply transformations and write file
    data_gen.apply_transformations()

    output_columns = [
        'External_Id__c', 'ArticleNumber__c', 'CreatedDate__c',
        'Owner.External_Id__c', 'PublishStatus__c', 'IsLatestVersion__c',
        'IsVisibleInApp__c', 'IsVisibleInCsp__c', 'IsVisibleInPkb__c',
        'IsVisibleInPrm__c', 'KCSArticle__ka.External_Id__c', 'Title__c',
        'VersionNumber__c', 'Language__c', 'analyticsdemo_batch_id__c'
    ]
    data_gen.write(output_file_name, output_columns)
Example #22
def run(batch_id, source_file_name, output_file_name):
    data_gen = DataGenerator()


    # load source file
    source_columns = ['AccountExternalId__c', 'AccountName__c']
    data_gen.load_source_file(source_file_name, source_columns)


    # rename columns
    data_gen.rename_column('AccountExternalId__c', 'External_Id__c')
    data_gen.rename_column('AccountName__c', 'Name')


    # filter out duplicate data
    data_gen.unique()


    # load shape data as dataset
    shape_columns = [
        'AccountExternalId__c',
        'AccountAnnualRevenue__c',
        'AccountNumberOfEmployees__c',
        'AccountBookings__c',
        'Region__c',
        'Owner.External_Id__c',
        'CloseDate',
        'CreatedDate__c']
    shape_dataset = data_gen.load_dataset('shape', source_file_name, shape_columns)


    # build map of account values
    shape_account_map = shape_dataset.group_by('AccountExternalId__c')


    # helper method to get shape data related to an account
    def get_shape_data(column_values, shape_column_name):
        return shape_account_map.get(column_values['External_Id__c'])[0].get(shape_column_name)


    # generate owner
    def owner_formula(column_values):
        return get_shape_data(column_values, 'Owner.External_Id__c')
    data_gen.add_formula_column('Owner.External_Id__c', owner_formula)


    # update number employees based on shape data
    def employees_formula(column_values):
        employees = get_shape_data(column_values, 'AccountNumberOfEmployees__c')
        return randint(*account.client_size_employees_bands[employees])
    data_gen.add_formula_column('NumberOfEmployees', employees_formula)


    # update annual revenue based on shape data
    def revenue_formula(column_values):
        revenue = get_shape_data(column_values, 'AccountAnnualRevenue__c')
        return 1000 * randint(*account.client_size_rev_bands[revenue])
    data_gen.add_formula_column('AnnualRevenue', revenue_formula)


    # generate account source
    data_gen.add_formula_column('AccountSource', formula=account.account_source)


    # update type based on shape data
    def type_formula(column_values):
        return get_shape_data(column_values, 'AccountAnnualRevenue__c')
    data_gen.add_formula_column('Type', type_formula)


    # generate industry
    data_gen.add_formula_column('Industry', formula=account.account_industry)


    # generate billing street
    data_gen.add_formula_column('BillingStreet', formula=lambda: fake.building_number() + ' ' + fake.street_name())


    # generate billing city
    data_gen.add_formula_column('BillingCity', formula=fake.city)


    # update billing state based on shape data
    def state_formula(column_values):
        region = get_shape_data(column_values, 'Region__c')
        return choice(account.region_state_map[region])
    data_gen.add_formula_column('BillingState', state_formula)


    # generate billing country
    data_gen.add_constant_column('BillingCountry', 'USA')


    # generate year started
    data_gen.add_formula_column('YearStarted', formula=account.account_year_started)


    # generate ownership
    data_gen.add_formula_column('Ownership', formula=account.account_ownership)


    # generate rating
    data_gen.add_formula_column('Rating', formula=account.account_rating)


    # generate earliest created date
    def create_date_formula(column_values):
        opptys = shape_account_map.get(column_values['External_Id__c'])
        create_dates = [dateutil.parser.parse(oppty['CreatedDate__c']) for oppty in opptys]
        create_dates.sort()
        return create_dates[0]
    data_gen.add_formula_column('CreatedDate__c', create_date_formula)


    # generate earliest close date
    def close_date_formula(column_values):
        opptys = shape_account_map.get(column_values['External_Id__c'])
        close_dates = [dateutil.parser.parse(oppty['CloseDate']).date() for oppty in opptys]
        close_dates.sort()
        return close_dates[0]
    data_gen.add_formula_column('OpportunityCloseDate__c', close_date_formula)

    # add a UUID for each row that is created in this batch
    data_gen.add_constant_column('analyticsdemo_batch_id__c', batch_id)

    # apply transformations and write file
    data_gen.apply_transformations()
    data_gen.write(output_file_name)
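A standalone sketch of the shape lookup above, assuming group_by returns a mapping from key to the list of matching row dicts (illustrative rows):

from collections import defaultdict

shape_rows = [
    {'AccountExternalId__c': 'W_Account.1', 'Region__c': 'West'},
    {'AccountExternalId__c': 'W_Account.1', 'Region__c': 'West'},
    {'AccountExternalId__c': 'W_Account.2', 'Region__c': 'East'},
]
shape_account_map = defaultdict(list)
for row in shape_rows:
    shape_account_map[row['AccountExternalId__c']].append(row)

# get_shape_data: read an attribute off the first row in the group
print(shape_account_map['W_Account.1'][0]['Region__c'])  # West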
Example #23
def run(source_file_name, output_file_name):
    data_gen = DataGenerator()

    # load source file
    data_gen.load_source_file(source_file_name)

    # find mean and std of profit
    profits = []
    for row in data_gen.rows:
        column_values = data_gen.row_to_column_values(row)
        profits.append(float(column_values['Profit']))


    profit_mean = mean(profits)
    profit_std = std(profits)

    # filter out profits more than 2 std out.
    def filter_func(column_values):
        profit = float(column_values['Profit'])
        z_score = abs((profit - profit_mean) / profit_std)
        return z_score <= 2
    data_gen.filter(filter_function=filter_func)


    store_tier_map = {
        'New York 4': "Tier 1",
        'New York 3': "Tier 1",
        'New York 2': "Tier 1",
        'New York 1': "Tier 1",
        'Chicago 3': "Tier 1",
        'Chicago 2': "Tier 2",
        'Chicago 1': "Tier 2",
        'Boston 2': "Tier 2",
        'Boston 1': "Tier 3"
    }
    data_gen.add_map_column('Tier', 'Store', store_tier_map)


    month_channel_map = {
        'January': 'Chat',
        'February': 'Chat',
        'March': 'Chat',
        'April': 'Chat',
        'May': 'Chat',
        'June': 'Email',
        'July': 'Email',
        'August': 'Facebook',
        'September': 'Phone',
        'October': 'Phone',
        'November': 'Website',
        'December': 'Website'
    }
    data_gen.add_map_column('Origin', 'Month', month_channel_map)


    discount_support_map = {
        '0': 'Free',
        '0.05': 'Free',
        '0.15': 'Basic',
        '0.1': 'Silver',
        '0.2': 'Platinum'
    }
    data_gen.add_map_column('Type_of_Support__c', 'Discount', discount_support_map)


    camp_reason_map = {
        "Bundled": "Documentation",
        "Buy More & Save": "Unknown Failure",
        "Competitor Focus": "Feature Question",
        "Door Buster": "Hardware Question",
        "Friends & Family": "Late Delivery",
        "Local": "Software Question",
        "Paper Circular": "General Question",
        "Regional": "Item Damaged",
        "Social": "Item Damaged"
    }
    data_gen.add_map_column('Reason', 'Marketing Campaign', camp_reason_map)


    city_priority_map = {
        "Boston": "Low",
        "Chicago": "Medium",
        "New York": "High"
    }
    data_gen.add_map_column('Priority', 'City', city_priority_map)


    comp_sla_map = {
        "High": "Violation",
        "Normal": "Compliant",
        "Low": "Compliant"
    }
    data_gen.add_map_column('SLA', 'Competition', comp_sla_map)


    data_gen.add_constant_column('Status', 'Closed')


    sla_first_contact_close_map = {
        'Compliant': lambda: choice(['true', 'false'], p=[.9, .1]),
        'Violation': lambda: choice(['true', 'false'], p=[.7, .3])
    }
    data_gen.add_map_column('First_Contact_Close__c', 'SLA', sla_first_contact_close_map)


    sla_time_open_map = {
        'Compliant': lambda: choice([12, 24, 36, 48], p=[.50, .20, .20, .10]),
        'Violation': lambda: choice([60, 72, 84, 96, 108, 120], p=[.60, .20, .10, .05, .03, .02])
    }
    data_gen.add_map_column('Time_Open__c', 'SLA', sla_time_open_map)


    def region_formula(column_values):
        average_age = float(column_values['Average Age'])
        if average_age < 40:
            return 'West CSR'
        elif average_age < 50:
            return 'Central CSR'
        else:
            return 'East CSR'
    data_gen.add_formula_column('Team__c', region_formula)


    def user_formula(column_values):
        average_age = float(column_values['Average Age'])
        if average_age < 40:
            return 'W_Services_User.' + str(choice([1, 2, 3, 4, 5]))
        elif average_age < 50:
            return 'W_Services_User.' + str(choice([6, 7, 8, 9, 10, 11]))
        else:
            return 'W_Services_User.' + str(choice([12, 13, 14, 15, 16, 17]))
    data_gen.add_formula_column('Owner.External_Id__c', user_formula)


    # generate offer voucher - vouchers on Video Games and Cables cases boost CSAT;
    # the chance of a voucher scales with the normalized profit score
    def offer_voucher_formula(column_values):
        csat = float(column_values['Profit Linear'])
        item = column_values['Item']

        if item in ['Video Games', 'Cables']:
            return choice(['true', 'false'], p=[csat/100, (100 - csat) / 100])
        else:
            return 'false'
    data_gen.add_formula_column('Offer_Voucher__c', offer_voucher_formula)


    def send_field_service_formula(column_values):
        csat = float(column_values['Profit Linear'])
        item = column_values['Item']

        if csat >= 80.0 and item == 'Tablet':
            return 'true'
        else:
            return choice(['true', 'false'], p=[.25, .75])
    data_gen.add_formula_column('Send_FieldService__c', send_field_service_formula)

    data_gen.add_map_column('IsEscalated', 'Tier', {'Tier 1': 'true', None: 'false'})

    # generate close date offset
    # random offset covering the last 14 months
    data_gen.add_formula_column('close_date_offset', lambda: randint(1, 30 * 14))


    # generate account id - long-tail distribution of cases per account (lognormal plus a random offset)
    # helper dataset used for account selection
    data_gen.add_dataset('current_account', {'account_id': 0, 'account_count': 0})


    # generate a distribution of account ids
    def account_id_formula(column_values):
        current_account = data_gen.datasets['current_account']
        account_id = current_account['account_id']
        account_count = current_account['account_count']

        if account_count > 0:
            # continue with the current account_id while rows remain for it
            account_count -= 1
            current_account['account_count'] = account_count
        else:
            # start a new account with a long-tail number of cases
            account_id += 1
            account_count = int(round(lognormal(1))) + randint(1, 7)

            # consume one case and update the dataset for the next iteration
            account_count -= 1
            current_account['account_count'] = account_count
            current_account['account_id'] = account_id
        return 'W_Services_Account.' + str(account_id)
    data_gen.add_formula_column('Account.External_Id__c', account_id_formula)

    def csat_formula(column_values):
        # first rescale the profit score into the 30-100 CSAT range
        csat = float(column_values['Profit Linear'])
        new_delta = 70
        csat = (new_delta * csat / 100) + 30
        channel = column_values['Origin']
        is_escalated = column_values['IsEscalated']
        send_field_service = column_values['Send_FieldService__c']
        offer_voucher = column_values['Offer_Voucher__c']

        if is_escalated == 'true':
            if channel == 'Phone':
                csat = csat - 2
            else:
                csat = csat + 2

        if send_field_service == 'true':
            if channel == 'Phone':
                csat = csat - 2
            else:
                csat = csat + 4

        if offer_voucher == 'true':
            if channel == 'Phone':
                csat = csat - 2
            else:
                csat = csat + 4

        return csat
    data_gen.add_formula_column('CSAT__c', formula=csat_formula)
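
    # a worked example of the rescaling above, assuming Profit Linear = 50
    # on an escalated Chat case with field service and a voucher:
    #   base: 70 * 50 / 100 + 30 = 65
    #   escalated (non-phone): +2 -> 67
    #   field service (non-phone): +4 -> 71
    #   voucher (non-phone): +4 -> 75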

    data_gen.add_map_column('Outlier', 'Outlier', value_map={
        'TRUE': 'true',
        None: 'false'
    })

    data_gen.apply_transformations()


    data_gen.add_map_column('Time_Open__c', 'First_Contact_Close__c', value_map={
        'true': 0,
        None: lambda cv: cv['Time_Open__c']
    })

    data_gen.apply_transformations()

    rename_map = {
        'Item': 'Product_Family_KB__c'
    }
    data_gen.rename_columns(rename_map)

    output_columns = [
        'Origin',
        'Store',
        'Tier',
        'Product_Family_KB__c',
        'Priority',
        'Average Age',
        'Percent Male',
        'SLA',
        'Daily Revenue',
        'Reason',
        'Reg Price',
        'Type_of_Support__c',
        'Price',
        'Quantity',
        'Cost',
        'Profit',
        'CSAT__c',
        'Profit Log',
        'Outlier',
        'Status',
        'First_Contact_Close__c',
        'Time_Open__c',
        'Team__c',
        'Owner.External_Id__c',
        'close_date_offset',
        'Account.External_Id__c',
        'Offer_Voucher__c',
        'Send_FieldService__c',
        'IsEscalated'
    ]
    data_gen.write(output_file_name, output_columns)
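
The Profit filter near the top of this example is a plain two-standard-deviation (z-score) cut. A minimal, self-contained sketch of the same rule on a toy list, assuming only numpy and not the DataGenerator API:

from numpy import mean, std

profits = [10.0, 11.0, 12.0, 9.0, 10.0, 200.0]
m, s = mean(profits), std(profits)

# keep values whose z-score is within two standard deviations of the mean
kept = [p for p in profits if abs((p - m) / s) <= 2]
print(kept)  # [10.0, 11.0, 12.0, 9.0, 10.0] - the 200.0 outlier is dropped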
Example #24
def run(batch_id, source_file_name, output_file_name, reference_date=today_datetime, filter_function=None):

    def get_close_date(values):
        return dateutil.parser.parse(values['CloseDate'])

    def get_create_date(values):
        return dateutil.parser.parse(values['CreatedDate__c'])

    data_gen = DataGenerator()

    # load source file
    data_gen.load_source_file(source_file_name)

    # add a UUID for each row that is created in this batch
    data_gen.add_constant_column('analyticsdemo_batch_id__c', batch_id)

    # add an age column
    data_gen.add_copy_column('Age__c', 'TimeToClose__c')

    # generate a close date
    def close_date_formula(column_values):
        last_day = date(date.today().year, 12, 31)
        offset = column_values['close_date_offset__c']
        # last day of current year - offset
        close_date = last_day - timedelta(days=int(offset))
        return str(close_date)
    data_gen.add_formula_column('CloseDate', close_date_formula)


    # generate a create date
    def create_date_formula(column_values):
        close_date = dateutil.parser.parse(column_values['CloseDate'])
        offset = column_values['TimeToClose__c']
        create_date = close_date - timedelta(days=int(offset))
        return create_date.isoformat(sep=' ')
    data_gen.add_formula_column('CreatedDate__c', create_date_formula)

    # generate last activity date
    def last_activity_date_formula(column_values):
        create_date = get_create_date(column_values)
        close_date = get_close_date(column_values)
        if close_date > reference_date:
            close_date = reference_date
        if create_date > reference_date:
            create_date = reference_date
        return fake.date_time_between_dates(create_date, close_date).date()
    data_gen.add_formula_column('LastActivityDate__c', formula=last_activity_date_formula)

    data_gen.apply_transformations()

    if filter_function:
        data_gen.filter(filter_function)

    new_rows = []
    row_count = len(data_gen.rows)
    for i in range(row_count):
        row = data_gen.rows.pop()
        column_values = data_gen.row_to_column_values(row)

        close_day = get_close_date(column_values)
        create_day = get_create_date(column_values)

        # if the close date is on or before the reference date, keep the row exactly as-is
        if close_day <= reference_date:
            new_rows.append(row)

        # if the create date is on or before the reference date but the close date is after it, the opportunity is still open
        elif (create_day <= reference_date) and (close_day > reference_date):
            # set age
            age = (reference_date - create_day).days
            column_values['Age__c'] = age

            ttc = float(column_values['TimeToClose__c'])
            pct = age / ttc

            # set IsClosed to blank
            column_values['IsClosed'] = ''

            # set IsWon to blank
            column_values['IsWon'] = ''

            # set a stage name, with jitter, clamped to a valid index
            stage_name_index = int(floor(pct * 4) + choice([-1, 0, 1], p=[.2, .7, .1]))
            stage_name_index = max(0, min(stage_name_index, 3))

            column_values['StageName'] = definitions.stage_name[stage_name_index]

            column_values['Probability'] = definitions.probabilities[stage_name_index]

            column_values['ForecastCategory'] = definitions.forecast_category[choice([1, 2, 4], p=[.625, .25, .125])]

            column_values['ForecastCategoryName'] = definitions.forecast_category_name[column_values['ForecastCategory']]

            column_values['SalesStageCount__c'] = ceil(pct * float(column_values['SalesStageCount__c']))

            new_rows.append(data_gen.column_values_to_row(column_values))

    data_gen.rows = new_rows
    data_gen.reverse()

    data_gen.write(output_file_name)
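
The in-flight stage above comes from mapping the opportunity's elapsed-time fraction onto four stages, jittering by one step, and clamping. A small sketch of that mapping, assuming numpy's choice and an illustrative stage list (the example itself reads stages from definitions.stage_name):

from math import floor

from numpy.random import choice

stages = ['Qualification', 'Discovery', 'Proposal/Quote', 'Negotiation']

def pick_stage(pct):
    # pct = 0.6 -> floor(2.4) = 2, then jitter by -1/0/+1 and clamp to 0..3
    index = int(floor(pct * 4) + choice([-1, 0, 1], p=[.2, .7, .1]))
    return stages[max(0, min(index, 3))]

print(pick_stage(0.6))  # most often 'Proposal/Quote'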
def run(batch_id,
        source_file_name,
        output_file_name,
        reference_datetime=today):
    data_gen = DataGenerator()

    # load source file
    source_columns = [
        'External_Id__c', 'Owner.External_Id__c', 'CreatedDate__c',
        'ClosedDate__c', 'Origin'
    ]
    data_gen.load_source_file(source_file_name, source_columns)

    data_gen.rename_column('External_Id__c', 'Case.External_Id__c')
    data_gen.rename_column('ClosedDate__c', 'EndTime__c')

    data_gen.duplicate_rows(duplication_factor=lambda: choice(
        [1, 2, 3, 4, 5], p=[.65, .15, .10, .05, .05]))

    data_gen.add_formula_column(
        'External_Id__c',
        lambda: 'W_LiveChatTranscript.' + str(data_gen.current_row + 1))

    data_gen.add_formula_column('Abandoned__c', lambda: randint(1, 300))

    data_gen.add_formula_column('AverageResponseTimeOperator__c',
                                lambda: randint(1, 180))

    data_gen.add_formula_column('AverageResponseTimeVisitor__c',
                                lambda: randint(1, 180))

    data_gen.add_formula_column('Body__c', formula=fake.body)

    data_gen.add_formula_column('Browser__c', formula=fake.browser)

    data_gen.add_constant_column('BrowserLanguage__c', 'en_US')

    data_gen.add_formula_column('ChatDuration__c', lambda: randint(1, 600))

    data_gen.add_formula_column('ChatKey__c', formula=fake.md5)

    data_gen.add_formula_column('IpAddress__c', formula=fake.ipv4)

    data_gen.add_formula_column('LiveChatButton.DeveloperName',
                                ['Public_Website_Chat_Button'])

    data_gen.add_formula_column('Location__c', formula=fake.city)

    data_gen.add_formula_column('MaxResponseTimeOperator__c',
                                lambda: randint(1, 120))

    data_gen.add_formula_column('MaxResponseTimeVisitor__c',
                                lambda: randint(1, 240))

    data_gen.add_formula_column('Name__c',
                                lambda: str(data_gen.current_row + 1).zfill(8))

    data_gen.add_formula_column('OperatorMessageCount__c',
                                lambda: randint(1, 100))

    data_gen.add_formula_column(
        'Platform__c', ['MacOSX', 'iOS', 'Android', 'Windows', 'Unix'])

    referrer = [
        "https://na17.salesforce.com/setup/forcecomHomepage.apexp?setupid=ForceCom&retURL=%2Fui%2Fsupport%2Fservicedesk%2FServiceDeskPage",
        "https://na13.salesforce.com/home/home.jsp",
        "https://sdodemo-main.force.com/partners/servlet/servlet.Integration?lid=01ra0000001VlbA&ic=1",
        "https://sitestudio.na17.force.com/?exitURL=%2F_ui%2Fnetworks%2Fsetup%2FSetupNetworksPage%2Fd",
        "https://mail.google.com/mail/u/0/",
        "https://sdodemo-main.force.com/customers/servlet/servlet.Integration?lid=01ra0000001VlbP&ic=1",
        "https://sdodemo-main.force.com/consumers/servlet/servlet.Integration?lid=01ro0000000EN78&ic=1",
        "https://na17.salesforce.com/servlet/servlet.su?oid=00D300000007EfQ&retURL=%2F0033000000PuxU2&sunetworkuserid=005a000000AuCha&sunetworkid=0DBo0000000Gn4h",
        "https://sdodemo-main.force.com/customers/servlet/servlet.Integration?ic=1&lid=01ra0000001VlbP"
    ]
    data_gen.add_formula_column('ReferrerUri__c', referrer)

    def create_date_formula(column_values):
        case_create_date = dateutil.parser.parse(
            column_values['CreatedDate__c'])
        case_close_date = dateutil.parser.parse(column_values['EndTime__c'])
        create_date = fake.date_time_between_dates(case_create_date,
                                                   case_close_date)
        if create_date > reference_datetime:
            create_date = reference_datetime
        return create_date.isoformat(sep=' ')

    data_gen.add_formula_column('CreatedDate__c', create_date_formula)

    def start_time_formula(column_values):
        create_date = dateutil.parser.parse(column_values['CreatedDate__c'])
        start_time = create_date + timedelta(seconds=randint(1, 300))
        return start_time.isoformat(sep=' ')

    data_gen.add_formula_column('StartTime__c', start_time_formula)

    def end_time_formula(column_values):
        create_date = dateutil.parser.parse(column_values['StartTime__c'])
        end_time = create_date + timedelta(seconds=randint(1, 600))
        return end_time.isoformat(sep=' ')

    data_gen.add_formula_column('EndTime__c', end_time_formula)

    data_gen.add_copy_column('RequestTime__c', 'CreatedDate__c')

    data_gen.add_formula_column(
        'Status__c', lambda: choice(['Missed', 'Completed'], p=[.20, .80]))

    data_gen.add_map_column('EndedBy__c', 'Status__c', {
        'Completed': ['Visitor', 'Agent'],
        None: 'Visitor'
    })

    data_gen.add_constant_column('SupervisorTranscriptBody__c', '')

    data_gen.add_constant_column('ScreenResolution__c', '')

    data_gen.add_formula_column('UserAgent__c', formula=fake.user_agent)

    data_gen.add_formula_column('VisitorMessageCount__c',
                                lambda: randint(1, 50))

    data_gen.add_formula_column('WaitTime__c', lambda: randint(1, 120))

    def last_referenced_date_formula(column_values):
        create_date = dateutil.parser.parse(column_values['CreatedDate__c'])
        last_referenced_date = create_date + timedelta(seconds=randint(1, 300))
        return last_referenced_date.isoformat(sep=' ')

    data_gen.add_formula_column('LastReferencedDate__c',
                                last_referenced_date_formula)

    data_gen.add_copy_column('LastViewedDate__c', 'LastReferencedDate__c')

    # add a UUID for each row that is created in this batch
    data_gen.add_constant_column('analyticsdemo_batch_id__c', batch_id)

    def filter_func(column_values):
        return column_values['Origin'] == 'Chat'

    data_gen.filter(filter_function=filter_func)

    # apply transformations and write file
    data_gen.apply_transformations()

    data_gen.sort_by('StartTime__c')

    output_columns = [
        'External_Id__c', 'Abandoned__c', 'AverageResponseTimeOperator__c',
        'MaxResponseTimeOperator__c', 'OperatorMessageCount__c', 'Body__c',
        'Browser__c', 'BrowserLanguage__c', 'Case.External_Id__c',
        'ChatDuration__c', 'ChatKey__c', 'CreatedDate__c', 'StartTime__c',
        'EndTime__c', 'EndedBy__c', 'LastReferencedDate__c',
        'LastViewedDate__c', 'LiveChatButton.DeveloperName', 'Location__c',
        'Owner.External_Id__c', 'Platform__c', 'ReferrerUri__c',
        'ScreenResolution__c', 'RequestTime__c', 'Status__c',
        'SupervisorTranscriptBody__c', 'UserAgent__c',
        'AverageResponseTimeVisitor__c', 'IpAddress__c',
        'MaxResponseTimeVisitor__c', 'VisitorMessageCount__c', 'WaitTime__c',
        'analyticsdemo_batch_id__c'
    ]
    data_gen.write(output_file_name, output_columns)
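
Each transcript's timestamps are chained: a created date sampled inside the case's lifetime, a start time shortly after it, and an end time after that, so the ordering holds by construction. A compact sketch of the same chaining, assuming a Faker instance named fake:

from datetime import datetime, timedelta
from random import randint

from faker import Faker

fake = Faker()

case_created = datetime(2021, 3, 1, 9, 0)
case_closed = datetime(2021, 3, 3, 17, 0)

created = fake.date_time_between_dates(case_created, case_closed)
start = created + timedelta(seconds=randint(1, 300))
end = start + timedelta(seconds=randint(1, 600))
print(created < start < end)  # True by construction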
def run(batch_id,
        source_file_name,
        output_file_name,
        reference_datetime=today):
    data_gen = DataGenerator()

    # load source file
    account_columns = [
        'External_Id__c', 'Owner.External_Id__c', 'OpportunityCloseDate__c'
    ]
    data_gen.load_source_file(source_file_name, account_columns)

    data_gen.rename_column('External_Id__c', 'Account.External_Id__c')
    data_gen.rename_column('OpportunityCloseDate__c', 'CreatedDate__c')

    # generate a random number of cases per account
    data_gen.duplicate_rows(
        duplication_factor=lambda: int(lognormal(0) + randint(0, 2)))

    data_gen.add_formula_column(
        'External_Id__c',
        formula=lambda: 'W_Sales_Case.' + str(data_gen.current_row + 1))

    # generate contact
    def contact_formula(column_values):
        return column_values['Account.External_Id__c'].replace(
            'W_Account', 'W_Contact')

    data_gen.add_formula_column('Contact.External_Id__c', contact_formula)

    data_gen.add_formula_column('IsEscalated', case.case_is_escalated)
    data_gen.add_formula_column('CSAT__c', case.case_csat)

    data_gen.add_formula_column('Origin', formula=case.case_origin)
    data_gen.add_formula_column('Type', formula=case.case_type)
    data_gen.add_formula_column('Subject', formula=case.case_subject)
    data_gen.add_formula_column('Priority', formula=case.case_priority)

    data_gen.add_formula_column('Status', formula=case.case_status)

    def create_date_formula(column_values):
        oppty_close_date = dateutil.parser.parse(
            column_values['CreatedDate__c'])
        create_date = oppty_close_date + timedelta(days=randint(0, 90))
        if create_date > reference_datetime:
            create_date = reference_datetime
        return create_date.isoformat(sep=' ')

    data_gen.add_formula_column('CreatedDate__c', create_date_formula)

    def close_date_formula(column_values):
        create_date = dateutil.parser.parse(column_values['CreatedDate__c'])
        if column_values['Status'] == 'Closed':
            close_date = create_date + timedelta(days=randint(0, 10))
            if close_date > reference_datetime:
                close_date = reference_datetime
            return close_date.isoformat(sep=' ')
        else:
            return ''

    data_gen.add_formula_column('ClosedDate__c', close_date_formula)

    # add a UUID for each row that is created in this batch
    data_gen.add_constant_column('analyticsdemo_batch_id__c', batch_id)

    data_gen.apply_transformations()
    data_gen.write(output_file_name)
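
The duplication factor above draws a long-tailed number of cases per account. A quick sketch of that draw's shape, assuming numpy's lognormal and randint (numpy's randint excludes the upper bound):

from numpy.random import lognormal, randint

samples = [int(lognormal(0) + randint(0, 2)) for _ in range(10000)]
print(min(samples), max(samples))   # mostly 0-3 cases per account...
print(sum(s > 5 for s in samples))  # ...with a thin long tail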
Example #27
def run(batch_id,
        source_file_name,
        output_file_name,
        reference_date=today_datetime):
    data_gen = DataGenerator()

    # load source file
    source_columns = [
        'External_Id__c', 'StageName', 'Amount', 'ForecastCategory',
        'CloseDate', 'CreatedDate__c', 'SalesStageCount__c'
    ]
    data_gen.load_source_file(source_file_name, source_columns)

    data_gen.rename_columns({
        'StageName': 'StageName__c',
        'Amount': 'Amount__c',
        'ForecastCategory': 'ForecastCategory__c',
        'CloseDate': 'CloseDate__c'
    })

    data_gen.add_copy_column('Opportunity.External_Id__c', 'External_Id__c')

    # add a UUID for each row that is created in this batch
    data_gen.add_constant_column('analyticsdemo_batch_id__c', batch_id)

    data_gen.apply_transformations()

    stages = ['Qualification', 'Discovery', 'Proposal/Quote', 'Negotiation']
    forecast_categories = ['BestCase', 'Pipeline', 'Commit']

    pipe_bucket = [
        'No Change', 'Reopen', 'Expand', 'Reduce', 'Moved Out', 'Moved In',
        'Stage Change'
    ]
    pipe_bucket_ratio = [0.10, 0.05, 0.15, 0.15, 0.30, 0.10, 0.15]
    qualification_pipe_bucket = [
        'No Change', 'Reopen', 'Expand', 'Reduce', 'Moved Out', 'Moved In'
    ]
    qualification_pipe_bucket_ratio = [0.20, 0.05, 0.20, 0.10, 0.35, 0.10]
    zero_amount_pipe_bucket = [
        'No Change', 'Reopen', 'Moved Out', 'Moved In', 'Stage Change'
    ]
    zero_amount_pipe_bucket_ratio = [0.20, 0.05, 0.35, 0.10, 0.30]

    current_count = 1
    new_rows = []
    row_count = len(data_gen.rows)
    for i in range(row_count):
        row = data_gen.rows.pop()
        column_values = data_gen.row_to_column_values(row)

        opportunity_id = column_values['Opportunity.External_Id__c']
        close_date = dateutil.parser.parse(column_values['CloseDate__c'])
        create_date = dateutil.parser.parse(column_values['CreatedDate__c'])
        final_amount = int(column_values['Amount__c'])
        final_forecast_category = column_values['ForecastCategory__c']
        final_stage_name = column_values['StageName__c']
        stage_count = int(column_values['SalesStageCount__c'])

        # initialize most recent event date to reference_date or earlier
        event_date_range_start = create_date + (close_date - create_date) / 2
        event_date_range_end = close_date

        if close_date > reference_date:
            event_date_range_end = reference_date
            event_date_range_start = create_date + (reference_date -
                                                    create_date) / 2

        # ensure event happens on or after opportunity create_date
        event_date = fake.date_time_between_dates(event_date_range_start,
                                                  event_date_range_end)

        # create final state
        column_values['CreatedDate__c'] = event_date
        column_values['External_Id__c'] = 'W_OpportunityHistory.' + str(
            current_count)
        current_count += 1
        new_rows.append(data_gen.column_values_to_row(column_values))

        next_create_date = event_date
        next_stage_name = final_stage_name
        next_forecast_category = final_forecast_category
        next_close_date = close_date
        next_amount = final_amount

        movedOut = False
        movedIn = False
        expand = False
        reduce = False
        reopen = False
        initialized = False

        # generate events in reverse order until create_date
        for current_stage_count in range(stage_count):
            # choose the proper bucket depending on the scenario
            bucket = pipe_bucket
            ratio = pipe_bucket_ratio
            if next_amount <= 0:
                bucket = zero_amount_pipe_bucket
                ratio = zero_amount_pipe_bucket_ratio
            elif next_stage_name == 'Qualification':
                bucket = qualification_pipe_bucket
                ratio = qualification_pipe_bucket_ratio

            event = choice(bucket, p=ratio)

            event_date_range_end = event_date
            event_date_range_start = create_date + (event_date -
                                                    create_date) / 2
            event_date = fake.date_time_between_dates(event_date_range_start,
                                                      event_date_range_end)

            # if next stage is closed, make the previous event a stage change
            if 'Closed' in next_stage_name:
                event = 'Stage Change'

            # if the event date is the create date, create the initial state
            if current_stage_count == stage_count - 1:
                event_date = create_date
                event = 'Initial State'

            if event != 'No Change':
                curr_close_date = next_close_date
                curr_amount = next_amount
                curr_stage_name = next_stage_name
                curr_forecast_category = next_forecast_category

                if event == 'Reopen' and not reopen:
                    curr_stage_name = 'Closed Lost'
                    curr_forecast_category = 'Omitted'
                    reopen = True
                elif event == 'Initial State':
                    curr_stage_name = 'Qualification'
                    curr_forecast_category = 'Pipeline'
                    initialized = True
                elif event == 'Expand' and not expand:
                    curr_amount = next_amount - int(
                        uniform(.15, .45) * final_amount)
                    if curr_amount <= 0:
                        # reduce instead
                        curr_amount = next_amount + int(
                            uniform(.15, .45) * final_amount)
                    expand = True
                elif event == 'Reduce' and not reduce:
                    curr_amount = next_amount + int(
                        uniform(.15, .45) * final_amount)
                    reduce = True
                elif event == 'Moved In' and not movedIn:
                    curr_close_date = curr_close_date + timedelta(
                        days=randint(0, 30))
                    movedIn = True
                elif event == 'Moved Out' and not movedOut:
                    curr_close_date = curr_close_date - timedelta(
                        days=randint(30, 90))
                    movedOut = True
                elif event == 'Stage Change':
                    # if next stage is not closed, use previous stage
                    if 'Closed' not in next_stage_name and stages.index(
                            next_stage_name) - 1 > 0:
                        curr_stage_name = stages[stages.index(next_stage_name)
                                                 - 1]
                    # if next stage is closed, use any stage
                    elif 'Closed' in next_stage_name:
                        curr_stage_name = stages[randint(1, len(stages) - 1)]
                    else:
                        curr_stage_name = stages[0]
                    curr_forecast_category = forecast_categories[randint(
                        0,
                        len(forecast_categories) - 1)]

                new_column_values = {
                    'External_Id__c': 'W_OpportunityHistory.' + str(current_count),
                    'Opportunity.External_Id__c': opportunity_id,
                    'StageName__c': curr_stage_name,
                    'Amount__c': curr_amount,
                    'ForecastCategory__c': curr_forecast_category,
                    'CreatedDate__c': event_date.isoformat(sep=' '),
                    'CloseDate__c': curr_close_date.date().isoformat(),
                    'analyticsdemo_batch_id__c': batch_id
                }
                current_count += 1
                new_rows.append(data_gen.column_values_to_row(new_column_values))

                next_stage_name = curr_stage_name
                next_forecast_category = curr_forecast_category
                next_close_date = curr_close_date
                next_amount = curr_amount

    data_gen.rows = new_rows
    data_gen.reverse()

    data_gen.write(output_file_name, [
        'External_Id__c', 'Amount__c', 'StageName__c', 'ForecastCategory__c',
        'CloseDate__c', 'CreatedDate__c', 'Opportunity.External_Id__c',
        'analyticsdemo_batch_id__c'
    ])
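
Each history event is drawn from a weighted bucket chosen by the opportunity's current state (zero amount, Qualification, or the general case). A minimal sketch of one weighted draw, assuming numpy's choice, whose p= weights must sum to 1:

from numpy.random import choice

pipe_bucket = ['No Change', 'Reopen', 'Expand', 'Reduce',
               'Moved Out', 'Moved In', 'Stage Change']
pipe_bucket_ratio = [0.10, 0.05, 0.15, 0.15, 0.30, 0.10, 0.15]

assert abs(sum(pipe_bucket_ratio) - 1.0) < 1e-9
print(choice(pipe_bucket, p=pipe_bucket_ratio))  # e.g. 'Moved Out'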
def run(batch_id, source_file_name, output_file_name, accounts_file_name, contacts_file_name):
    data_gen = DataGenerator()


    # load source file
    source_columns = [
        'External_Id__c',
        'AccountExternalId__c',
        'Owner.External_Id__c',
        'LeadSource',
        'CloseDate',
        'CreatedDate__c'
    ]
    data_gen.load_source_file(source_file_name, source_columns)

    # load accounts as dataset
    account_columns = [
        'External_Id__c',
        'Name',
        'BillingState',
        'Industry'
    ]
    account_dataset = data_gen.load_dataset('accounts', accounts_file_name, account_columns)
    accounts_by_id = account_dataset.group_by('External_Id__c')


    # load contacts as dataset
    contact_columns = [
        'External_Id__c',
        'FirstName',
        'LastName'
    ]
    contact_dataset = data_gen.load_dataset('contacts', contacts_file_name, contact_columns)
    contacts_by_id = contact_dataset.group_by('External_Id__c')


    # helper method to get account data
    def get_account_data(column_values, account_column_name):
        return accounts_by_id.get(column_values['ConvertedAccount.External_Id__c'])[0].get(account_column_name)


    # helper method to get contact data
    def get_contact_data(column_values, contact_column_name):
        return contacts_by_id.get(column_values['ConvertedContact.External_Id__c'])[0].get(contact_column_name)


    # rename columns
    data_gen.rename_column('External_Id__c', 'ConvertedOpportunity.External_Id__c')
    data_gen.rename_column('AccountExternalId__c', 'ConvertedAccount.External_Id__c')
    data_gen.rename_column('CloseDate', 'ConvertedDate__c')


    # generate leads for a random subset (about 25%) of opportunities
    data_gen.duplicate_rows(duplication_factor=lambda: choice([0, 1], p=[.75, .25]))


    # generate id
    data_gen.add_formula_column('External_Id__c', formula=lambda: 'W_Lead.' + str(data_gen.current_row + 1))


    # generate create date
    def create_date_formula(column_values):
        oppty_create_date = dateutil.parser.parse(column_values['CreatedDate__c'])
        return oppty_create_date - timedelta(days=randint(0, 45))
    data_gen.add_formula_column('CreatedDate__c', create_date_formula)


    # generate status
    data_gen.add_formula_column('Status', formula=lead.lead_status)


    # generate is converted flag
    data_gen.add_map_column('IsConverted', 'Status', {
        'Qualified - Convert': 'true',
        None: 'false'
    })


    # generate opportunity
    data_gen.add_map_column('ConvertedOpportunity.External_Id__c', 'Status', {
        'Qualified - Convert': lambda cv: cv['ConvertedOpportunity.External_Id__c'],
        None: ''
    })


    # generate account
    data_gen.add_map_column('ConvertedAccount.External_Id__c', 'Status', {
        'Qualified - Convert': lambda cv: cv['ConvertedAccount.External_Id__c'],
        None: ''
    })


    # generate contact
    data_gen.add_map_column('ConvertedContact.External_Id__c', 'Status', {
        'Qualified - Convert': lambda cv: cv['ConvertedAccount.External_Id__c'].replace('W_Account', 'W_Contact'),
        None: ''
    })


    # generate converted date
    data_gen.add_map_column('ConvertedDate__c', 'Status', {
        'Qualified - Convert': lambda cv: cv['ConvertedDate__c'],
        None: ''
    })


    # generate name
    data_gen.add_map_column('FirstName', 'Status', {
        'Qualified - Convert': lambda cv: get_contact_data(cv, 'FirstName'),
        None: lambda: fake.first_name()
    })


    data_gen.add_map_column('LastName', 'Status', {
        'Qualified - Convert': lambda cv: get_contact_data(cv, 'LastName'),
        None: lambda: fake.last_name()
    })


    # generate company
    data_gen.add_map_column('Company', 'Status', {
        'Qualified - Convert': lambda cv: get_account_data(cv, 'Name'),
        None: 'Not Applicable'
    })


    # generate industry
    data_gen.add_map_column('Industry', 'Status', {
        'Qualified - Convert': lambda cv: get_account_data(cv, 'Industry'),
        None: ''
    })


    # generate state
    data_gen.add_map_column('State', 'Status', {
        'Qualified - Convert': lambda cv: get_account_data(cv, 'BillingState'),
        None: ''
    })


    # generate is unread by owner
    data_gen.add_map_column('IsUnreadByOwner', 'Status', {
        'Qualified - Convert': 'false',
        None: lead.lead_is_unread_by_owner
    })

    # generate rating
    data_gen.add_formula_column('Rating', formula=lead.lead_rating)

    # add a UUID for each row that is created in this batch
    data_gen.add_constant_column('analyticsdemo_batch_id__c', batch_id)

    # apply transformations and write file
    data_gen.apply_transformations()
    data_gen.write(output_file_name)
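
The add_map_column calls above consistently use a None key as the fallback branch, with values that are either constants or callables receiving the row's column values. A hedged sketch of those apparent semantics outside the DataGenerator API (map_value is a hypothetical helper, not part of the library):

def map_value(value_map, key, column_values):
    # emulate the None-as-default lookup used by add_map_column
    mapped = value_map.get(key, value_map.get(None))
    return mapped(column_values) if callable(mapped) else mapped

value_map = {
    'Qualified - Convert': lambda cv: cv['ConvertedDate__c'],
    None: ''
}
row = {'Status': 'Open', 'ConvertedDate__c': '2021-06-30'}
print(repr(map_value(value_map, row['Status'], row)))  # '' (fallback)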
def run(batch_id, source_file_name, output_file_name):
    data_gen = DataGenerator()

    # load source file
    source_columns = [
        'External_Id__c', 'Owner.External_Id__c', 'CreatedDate__c',
        'EndTime__c', 'EndedBy__c', 'Status__c'
    ]
    data_gen.load_source_file(source_file_name, source_columns)

    data_gen.rename_column('Owner.External_Id__c', 'Agent.External_Id__c')

    data_gen.add_copy_column('LiveChatTranscript.External_Id__c',
                             'External_Id__c')
    data_gen.add_copy_column('Time__c', 'CreatedDate__c')

    data_gen.add_constant_column('Type__c', '')
    data_gen.add_constant_column('Detail__c', '')

    # add a UUID for each row that is created in this batch
    data_gen.add_constant_column('analyticsdemo_batch_id__c', batch_id)

    data_gen.apply_transformations()

    type_detail_map = {
        "ChatRequest": "Visitor requested chat.",
        "ChoiceRoute": "Choice chat request routed to all available qualified agents.",
        "CancelNoAgent": "Chat request canceled because no qualifying agents were available.",
        "Accept": "Chat request accepted by agent.",
        "CancelVisitor": "Visitor clicked Cancel Chat.",
        "LeaveAgent": "Agent left chat.",
        "EndAgent": "Agent clicked End Chat.",
        "LeaveVisitor": "Visitor left chat.",
        "EndVisitor": "Visitor clicked End Chat."
    }

    current_count = 1
    new_rows = []
    row_count = len(data_gen.rows)
    for i in range(row_count):
        row = data_gen.rows.pop()
        column_values = data_gen.row_to_column_values(row)

        live_chat = column_values['LiveChatTranscript.External_Id__c']
        agent = column_values['Agent.External_Id__c']
        create_date = dateutil.parser.parse(column_values['CreatedDate__c'])
        end_date = dateutil.parser.parse(column_values['EndTime__c'])
        ended_by = column_values['EndedBy__c']
        status = column_values['Status__c']

        # helper to append one transcript event row for this chat
        def add_event(type__c, when):
            nonlocal current_count
            new_rows.append(data_gen.column_values_to_row({
                'External_Id__c': 'W_LiveChatTranscriptEvent.' + str(current_count),
                'LiveChatTranscript.External_Id__c': live_chat,
                'Agent.External_Id__c': agent,
                'CreatedDate__c': when.isoformat(sep=' '),
                'Time__c': when.isoformat(sep=' '),
                'Type__c': type__c,
                'Detail__c': type_detail_map[type__c],
                'analyticsdemo_batch_id__c': batch_id
            }))
            current_count += 1

        # every chat starts with the visitor requesting it
        add_event('ChatRequest', create_date)

        if status == 'Missed':
            type__c = choice(['CancelVisitor', 'CancelNoAgent'])
            if type__c == 'CancelNoAgent':
                # no qualifying agents: route the request, cancel it,
                # then the visitor leaves or ends the chat
                create_date = fake.date_time_between_dates(create_date, end_date)
                add_event('ChoiceRoute', create_date)
                create_date = fake.date_time_between_dates(create_date, end_date)
                add_event('CancelNoAgent', create_date)
                add_event(choice(['LeaveVisitor', 'EndVisitor']), end_date)
            else:
                # visitor canceled the chat
                add_event('CancelVisitor', end_date)
        else:
            # completed chat: route, accept, then either side ends it
            add_event('ChoiceRoute', create_date)
            create_date = fake.date_time_between_dates(create_date, end_date)
            add_event('Accept', create_date)
            if ended_by == 'Visitor':
                add_event(choice(['LeaveVisitor', 'EndVisitor']), end_date)
            else:
                add_event(choice(['LeaveAgent', 'EndAgent']), end_date)

    data_gen.rows = new_rows

    # apply transformations and write file
    output_columns = [
        'External_Id__c', 'LiveChatTranscript.External_Id__c',
        'Agent.External_Id__c', 'Type__c', 'Detail__c', 'CreatedDate__c',
        'Time__c', 'analyticsdemo_batch_id__c'
    ]
    data_gen.write(output_file_name, output_columns)
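
One detail worth noticing: popping rows off the end walks the source in reverse, which is why the earlier examples call data_gen.reverse() after rebuilding rows. A tiny sketch of the pattern on a plain list:

rows = ['r1', 'r2', 'r3']
new_rows = []
for _ in range(len(rows)):
    new_rows.append(rows.pop())  # consumes r3, r2, r1
new_rows.reverse()               # restore the original order
print(new_rows)                  # ['r1', 'r2', 'r3']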
Example #30
def run(batch_id, source_file_name, output_file_name, reference_datetime=today):
    data_gen = DataGenerator()

    # load source file
    source_columns = [
        'External_Id__c',
        'Owner.External_Id__c',
        'CreatedDate__c',
        'LastActivityDate__c'
    ]
    data_gen.load_source_file(source_file_name, source_columns)


    data_gen.rename_column('External_Id__c', 'Case.External_Id__c')

    data_gen.duplicate_rows(duplication_factor=lambda: randint(0, 3))


    data_gen.add_formula_column('External_Id__c', formula=lambda: 'W_Services_Event.' + str(data_gen.current_row + 1))


    data_gen.add_formula_column('Subject', formula=event.event_subject)
    data_gen.add_formula_column('EventSubtype', formula=event.event_subtype)
    data_gen.add_formula_column('DurationInMinutes', formula=event.event_call_duration)

    def create_date_formula(column_values):
        case_create_date = dateutil.parser.parse(column_values['CreatedDate__c'])
        case_close_date = datetime.combine(dateutil.parser.parse(column_values['LastActivityDate__c']), case_create_date.time())
        if case_close_date > reference_datetime:
            case_close_date = reference_datetime
        create_date = fake.date_time_between_dates(case_create_date, case_close_date)
        if create_date > reference_datetime:
            create_date = reference_datetime
        return create_date.isoformat(sep=' ')
    data_gen.add_formula_column('CreatedDate__c', create_date_formula)

    data_gen.add_copy_column('LastModifiedDate__c', 'CreatedDate__c')


    def activity_date_formula(column_values):
        create_date = dateutil.parser.parse(column_values['CreatedDate__c']).date()
        return (create_date + timedelta(days=randint(0, 14))).isoformat()
    data_gen.add_formula_column('ActivityDate', activity_date_formula)


    def activity_datetime_formula(column_values):
        return dateutil.parser.parse(column_values['ActivityDate'])
    data_gen.add_formula_column('ActivityDateTime', activity_datetime_formula)


    data_gen.add_constant_column('ShowAs', 'Busy')

    # add a UUID for each row that is created in this batch
    data_gen.add_constant_column('analyticsdemo_batch_id__c', batch_id)

    # apply transformations and write
    data_gen.apply_transformations()

    output_columns = [
        'External_Id__c',
        'Owner.External_Id__c',
        'Case.External_Id__c',
        'Subject',
        'EventSubtype',
        'DurationInMinutes',
        'ShowAs',
        'CreatedDate__c',
        'LastModifiedDate__c',
        'ActivityDate',
        'ActivityDateTime',
        'analyticsdemo_batch_id__c'
    ]
    data_gen.write(output_file_name, output_columns)
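
A hypothetical invocation of this last generator, with illustrative arguments (the batch id and file paths are placeholders, not part of the example):

run(batch_id='batch-001',
    source_file_name='cases.csv',
    output_file_name='case_events.csv')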