Example #1
def double_check(db):
    report_id = 24

    helper.backup_table(db, 'tblInvestmentReport')
    remove_report(db, report_id)

    data = db.get_data(qry_get_all_recommendation_funds())
    fund_ids = []
    for row in data:
        fund_id = row[1]
        report_url = row[2]
        if report_url:
            fund_ids.append([fund_id])
            #logger.info('{}\t{}'.format(fund_id, report_url))
            #count = add_report(db, fund_id, report_id, report_url, analyst_id=56526, authoriser_id=56036)
            #logger.info('Adding report for {} - {}'.format(fund_id, count))
    create_table_query = '''
    create table {table_name} (
    id int
    )
    '''
    tt_name = TempTable.create_from_data(db, fund_ids, create_table_query)
    data = db.get_data('''
    select tt.ID, ve.stockCode as reportURL
    from {tt_name} tt
    left join vewEquities ve on tt.ID = ve.StockID
    left join tblInvestmentReport ir on tt.ID = ir.InvestmentID and ir.reportID={report_id} and ir.isActive=1
    where ir.reportURL is null
    '''.format(tt_name=tt_name, report_id=report_id))
    for row in data:
        fund_id = row[0]
        report_url = row[1]
        count = add_report(db, fund_id, report_id, report_url, analyst_id=56526, authoriser_id=56036)
        logger.info('Adding report for {} - {}'.format(fund_id, count))
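None of the helpers these examples lean on (TempTable, add_report, helper.backup_table, the qry_* builders) are shown. A minimal sketch of what TempTable.create_from_data might look like, assuming a thin pyodbc-style db wrapper and a {table_name} placeholder in the create query; every name below is an assumption, not the real implementation:

import uuid

class TempTable:
    """Hypothetical sketch of the temp-table helper the examples rely on."""

    def __init__(self, db, name):
        self.db = db
        self.name = name

    def __str__(self):
        # lets '{}'.format(tt_name) produce the table name in queries
        return self.name

    def __len__(self):
        # row count, which is why callers can assert count == len(tt_name)
        return self.db.get_one_value('select count(*) from {}'.format(self.name))

    @classmethod
    def create_from_data(cls, db, rows, create_qry):
        name = 'tempdb..[tt_{}]'.format(uuid.uuid4().hex[:8])
        db.execute(create_qry.format(table_name=name))
        for row in rows:   # a real implementation would bulk-insert
            marks = ','.join('?' * len(row))
            db.execute('insert into {} values ({})'.format(name, marks), *row)
        return cls(db, name)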
Example #2
def upload_cost(db, excel_file, sheet_name_or_idx):
    tt_name = helper.upload_excel_to_tempdb(db, excel_file, sheet_name_or_idx)

    # create additional queries for attribute 19
    tt_name = TempTable.create_from_query(db, qry_clone_date_to_attribute_id_19(tt_name, "Fees & Indirect Costs Date"))

    count = db.execute(qry_update_last_date_of_month(tt_name))
    logger.info("{} updated to ensure [date] column is last day of the month\n".format(count))

    cols = db.sp_columns("{}".format(tt_name))
    attribute_lookup = {col: db.get_one_value(qry_get_attribute_id(), col) for col in cols}
    # drop columns that have no match in tblAttribute
    attribute_lookup = {k: v for k, v in attribute_lookup.items() if v is not None}

    # back up tblInvestmentAttribute
    backup_table(db, "tblInvestmentAttribute")

    for attribute_name, attribute_id in attribute_lookup.items():

        attribute_tt_name = TempTable.create_from_query(
            db, qry_create_temp_table_for_attribute_name(tt_name, attribute_name, attribute_id)
        )
        logger.info(
            "create table [{}] for attribute name [{}][{}] - Qty {}".format(
                attribute_tt_name, attribute_name, attribute_id, len(attribute_tt_name)
            )
        )

        count = db.execute(qry_update_DateTo_of_tbInvestmentAttribute(attribute_tt_name))
        logger.info("{} updated by closing off current attributes".format(count))

        count = db.execute(qry_insert_new_investment_attributes(attribute_tt_name))
        logger.info("{} inserted into tblInvestmentAttribute".format(count))
        assert count == len(attribute_tt_name)
        logger.info("\n")

    # clean up junk data left behind by re-runs
    count = db.execute(qry_delete_rerun_data("tblInvestmentAttribute"))
    logger.info("{} deleted in tblInvestmentAttribute due to re-run".format(count))

    count = db.execute(qry_regenerate_report(tt_name, code_field="code"))
    logger.info("{} updated for regenerating report".format(count))
Example #3
def extract(db, excel_file, sheet_name_or_idx, data_provider, output):
    # Import excel file into temp table
    tt_name = helper.upload_excel_to_tempdb(db, excel_file, sheet_name_or_idx)
    another_tt_name = TempTable.create_from_query(db, qry_get_apircode_from_citicode(tt_name))
    orig_citi_codes = db.get_data('select citicode from {}'.format(tt_name))
    orig_citi_codes = {code.citicode for code in orig_citi_codes}
    logger.info(orig_citi_codes)

    assert len(tt_name) == len(another_tt_name)


    holding_tt_name = TempTable.create_from_query(db, qry_get_top_holdings(another_tt_name, data_provider))

    rows = db.get_data('select * from {}'.format(holding_tt_name))
    unique_citi_codes = {row[0] for row in rows}
    logger.info(orig_citi_codes-unique_citi_codes)

    csvwriter = csv.writer(output, lineterminator='\n')
    header = 'FundProductCode,HoldingCode,HoldingName,Weight,DateFrom'
    csvwriter.writerow(header.split(','))
    csvwriter.writerows(rows)
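The two set differences are a cheap reconciliation: any citicode that went in but produced no holdings is logged before the CSV is written. Note that pyodbc rows support both attribute access (code.citicode) and indexing (row[0]), which is why both appear above. The idea in miniature:

orig_codes = {'AAA01', 'BBB02', 'CCC03'}   # codes uploaded from the spreadsheet
found_codes = {'AAA01', 'CCC03'}           # codes that came back with holdings
missing = orig_codes - found_codes         # {'BBB02'}: lost somewhere in the joins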
Example #4
def double_check(db, output):
    all_fe_tt = TempTable.create_from_query(db, qry_get_all_fe_current_gs())
    all_latest_gs_tt = TempTable.create_from_query(db, qry_get_latest_gs(all_fe_tt))

    logger.info(len(all_fe_tt))
    logger.info(len(all_latest_gs_tt))

    data = db.get_data('''
    select investmentID, investmentCode from {tt_name}
    except
    select investmentID, investmentCode from {tt_name_1}
    '''.format(tt_name=all_fe_tt, tt_name_1=all_latest_gs_tt))

    no_gs_why = list(data)

    logger.info('-'*40)
    logger.info('The following codes have no current growth series')
    logger.info(no_gs_why)
    logger.info('-'*40)

    data = db.get_data(qry_compare_last_gs_with_external(all_latest_gs_tt))
    data = list(data)
    dict_data = collections.defaultdict(list)
    for row in data:
        dict_data[row[0]] += [row]

    final_data = []
    for investmentid, row in dict_data.items():
        if len(row) > 1:
            logger.info(row)
            continue
        row = list(row[0])
        row = row + [row[-3] == row[-4], row[-1] == row[-2]]
        #logger.info(row)
        final_data += [row]

    csvwriter = csv.writer(output, lineterminator='\n')
    csvwriter.writerow(['investmentID', 'investmentCode', 'Date', 'Value', 'ExternalValue', 'ValueExDiv', 'ExternalValueExDiv', 'APIRCode', 'ValueDif', 'ExDivDif'])
    csvwriter.writerows(final_data)
Example #5
def get_recommendation(db, etf_stock_list, output):
    csvreader = csv.reader(etf_stock_list)
    stocks = [x for x in csvreader]

    create_table_qry = """
    create table {table_name} (
        stockCode varchar(10) COLLATE SQL_Latin1_General_CP1_CI_AS
    )
    """
    tt_name = TempTable.create_from_data(db, stocks, create_table_qry)
    logger.info(tt_name)

    rows = db.get_data(qry_get_recommendation(tt_name))

    csvwriter = csv.writer(output, lineterminator="\n")
    csvwriter.writerow(["StockCode", "Recommendation", "DateFrom"])
    csvwriter.writerows(rows)
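A hypothetical invocation, assuming etf_stock_list is an open text file with one stock code per line (csv.reader then yields one-element lists, matching the single-column temp table) and output is any writable text stream:

import io

with open('etf_stocks.csv') as stock_list:
    output = io.StringIO()
    get_recommendation(db, stock_list, output)
    print(output.getvalue())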
Example #6
def process(db, excel_file, sheet_name_or_idx):

    helper.backup_table(db, 'tblInvestmentGrowthSeries')

    # Import excel file into temp table
    tt_name = helper.upload_excel_to_tempdb(db, excel_file, sheet_name_or_idx)
    unique_investmentids = db.get_one_value(qry_unique_investment_id(tt_name))
    logger.info(unique_investmentids)

    current_gs_tt_name = TempTable.create_from_query(db, qry_current_growth_series(tt_name))

    qry_turn_off_gs = partial(qry_update_gs, 0, current_gs_tt_name)
    qry_turn_on_gs = partial(qry_update_gs, 1, current_gs_tt_name)

    assert unique_investmentids == len(current_gs_tt_name)

    db.execute(qry_turn_off_gs())   # ignore count
    db.execute(qry_turn_on_gs())   # ignore count
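The two qry_turn_* callables are built with functools.partial, which freezes the leading arguments of qry_update_gs (presumably an isActive flag followed by the temp-table name, judging by the calls above). The equivalence, with an assumed signature:

from functools import partial

def qry_update_gs(is_active, tt_name):
    # assumed signature, inferred from partial(qry_update_gs, 0, current_gs_tt_name)
    return '''
    update gs set isActive = {is_active}
    from tblInvestmentGrowthSeries gs
    join {tt} tt on tt.investmentID = gs.investmentID
    '''.format(is_active=is_active, tt=tt_name)

qry_turn_off_gs = partial(qry_update_gs, 0, 'tempdb..[tt_x]')
assert qry_turn_off_gs() == qry_update_gs(0, 'tempdb..[tt_x]')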
Example #7
def double_check(db, output):
    report_id = 11
    tt_name = TempTable.create_from_query(db, qry_get_fund_hierarchy())

    data = db.get_data(qry_get_investment_detail(tt_name))

    csvwriter = csv.writer(output, lineterminator='\n')
    header = ['FundID', 'ParentFundID', 'Instrumentid', 'Apircode', 'InvestmentFullName', 'InvestmentStatus'
              , 'IsWholesale', 'isMainTaxStructure', 'Recommendation', 'RecommendationDateFrom', 'RecommendationStatus']
    csvwriter.writerow(header)

    def cleanup(row):
        if row[0] == row[1]:
            row[1] = ''
        return row

    data = [cleanup(row) for row in data]
    csvwriter.writerows(data)
Example #8
def upload_excel_to_tempdb(db, excel_file, sheet_name_or_idx):
    def get_data_from_excel():
        reader = ExcelReader(excel_file)
        rows = reader.get_data_from_sheet(sheet_name_or_idx)
        return rows, reader.create_qry
    tt_name = TempTable.create_from_data(db, *get_data_from_excel())

    try:
        # Normalise the [date] column of the imported temp table to a proper SQL date type
        date_value = db.get_one_value('select top 1 [date] from {}'.format(tt_name))

        if not isinstance(date_value, datetime.datetime):
            if isinstance(date_value, str):
                logger.info('Convert varchar to datetime for [date] column')

                # rewrite [date] as yyyy-mm-dd before converting to the date type
                db.execute('''
                    update {}
                    set [date]=right([date],4)+'-'+SUBSTRING([date], 4, 2) + '-' + left([date],2)
                '''.format(tt_name))
            elif isinstance(date_value, float):
                logger.info('Convert float to datetime for [date] column')
                # SQL Server counts dates from 1900-01-01; Excel serials start at 1899-12-30, 2 days earlier
                # widen the column to varchar, then cast (serial - 2) to datetime before the final alter to date
                db.execute('''
                    alter table {tt_name}
                    alter column [date] varchar(20)
                '''.format(tt_name=tt_name))

                db.execute('''
                    update {tt_name}
                    set date=cast(date - 2 as datetime)
                '''.format(tt_name=tt_name))

            db.execute('''
                alter table {}
                alter column [date] date
            '''.format(tt_name))
    except pyodbc.ProgrammingError:
        logger.info("{tt_name} does not have date column".format(tt_name=tt_name))

    return tt_name
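The float branch deals with Excel date serials. Excel serial 0 maps to 1899-12-30 (a side effect of Excel's fictitious 1900-02-29), while SQL Server's cast(n as datetime) counts from 1900-01-01, so for any modern date the two scales are exactly 2 apart; that is what [date] - 2 corrects. The same conversion in pure Python:

import datetime

def excel_serial_to_datetime(serial):
    # valid for serials >= 61, i.e. dates after Excel's phantom 1900-02-29
    return datetime.datetime(1899, 12, 30) + datetime.timedelta(days=serial)

assert excel_serial_to_datetime(43831.0).date() == datetime.date(2020, 1, 1)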
Example #9
def upload_top10(db, excel_file, sheet_name_or_idx, data_provider):

    tt_name = helper.upload_excel_to_tempdb(db, excel_file, sheet_name_or_idx)

    unique_tt_name = TempTable.create_from_query(db, qry_get_distinct_code_date(tt_name))

    # back up ExternalData.dbo.tblUnderlyingFund
    backup_table(db, 'ExternalData.dbo.tblUnderlyingFund')
    count = db.execute(qry_update_fund_name_tblUnderlyingFund(unique_tt_name))
    logger.info('{} fund name updated in tblUnderlyingFund'.format(count))

    count = db.execute(qry_insert_new_fund_tblUnderlyingFund(unique_tt_name, data_provider))
    logger.info('{} fund name inserted in tblUnderlyingFund'.format(count))

    count = db.execute(qry_update_DateFrom_tblUnderlyingFund(unique_tt_name))
    logger.info('{} fund name updated with latest date in tblUnderlyingFund'.format(count))
    assert count == len(unique_tt_name)

    # close off any open holdings for funds that changed
    # back up ExternalData.dbo.tblTopHoldings
    backup_table(db, 'ExternalData.dbo.tblTopHoldings')
    count = db.execute(qry_update_DateTo_of_current_holdings(unique_tt_name))
    logger.info('{} updated by closing off any open holdings for funds where changed'.format(count))

    # add the new holding code
    backup_table(db, 'ExternalData.dbo.tblHolding')
    count = db.execute(qry_add_new_holding_name(tt_name, data_provider))
    logger.info('{} updated by adding the new holding code'.format(count))

    # add new holding
    count = db.execute(qry_add_new_holdings(tt_name, data_provider))
    logger.info('{} updated by adding the new holding'.format(count))
    assert count == len(tt_name)

    count = db.execute(qry_regenerate_report(tt_name))
    logger.info('{} updated for regenerating report'.format(count))

    # clean up junk data left behind by re-runs
    count = db.execute(qry_delete_rerun_data('ExternalData..tblTopHoldings'))
    logger.info('{} deleted in ExternalData..tblTopHoldings due to re-run'.format(count))
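qry_delete_rerun_data guards against double-inserts when the script is run twice over the same spreadsheet. One plausible shape, assuming duplicates share a business key and DateFrom and the earliest row wins; this is a guess at the helper, not its actual definition:

def qry_delete_rerun_data(table_name):
    # delete later duplicates of (fundCode, holdingID, DateFrom), keeping the first
    return '''
    delete t
    from {table} t
    where exists (
        select 1
        from {table} earlier
        where earlier.fundCode = t.fundCode
          and earlier.holdingID = t.holdingID
          and earlier.DateFrom = t.DateFrom
          and earlier.ID < t.ID
    )
    '''.format(table=table_name)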
Example #10
def process(db, excel_file, dry_run):
    # Import excel file into temp table
    reader = ExcelReader(excel_file)
    rows = reader.get_data_from_sheet(0)
    tt_name = TempTable.create_from_data(db, rows, reader.create_qry)

    # Extract records in [ExternalData].dbo.tblGrowthSeries that matches InvestmentID in temp table imported above
    count = db.execute(import_growth_series_query(tt_name))

    assert len(tt_name) == count

    # back up tblInvestmentGrowthSeries
    today = datetime.date.today()
    today = today.strftime('%Y%m%d')

    if not db.sp_columns('tblInvestmentGrowthSeries_{date}'.format(date=today)):
        # if table does not exist
        logger.info('Creating tblInvestmentGrowthSeries_{date}'.format(date=today))
        db.execute('select * into tblInvestmentGrowthSeries_{date} from tblInvestmentGrowthSeries'.format(date=today))

    # get investment id from [ExternalData].dbo.tblGrowthSeries
    investment_id = db.get_one_value('select top 1 code from [ExternalData].dbo.ImportGrowthSeries')
    logger.info(investment_id)
    logger.info('Before updating')
    logger.info(db.get_data('select top 1 * from tblInvestmentGrowthSeries where investmentid=? order by [date] desc'
                            , investment_id))

    count = db.execute(merge_to_tblInvestmentGrowthSeries_query())
    logger.info("{} records updated in tblInvestmentGrowthSeries".format(count))
    logger.info('After updating')
    logger.info(db.get_data('select top 1 * from tblInvestmentGrowthSeries where investmentid=? order by [date] desc'
                            , investment_id))

    if not dry_run:
        logger.info('Commit changes')
        db.commit()
    else:
        logger.info('Dry run - changes were not committed')
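The inline snapshot above (an sp_columns existence check followed by select ... into) is presumably what helper.backup_table does in the other examples. A sketch built from exactly that pattern, ignoring schema-qualified names for brevity:

import datetime

def backup_table(db, table_name):
    # snapshot table_name into table_name_YYYYMMDD, at most once per day
    suffix = datetime.date.today().strftime('%Y%m%d')
    backup_name = '{}_{}'.format(table_name, suffix)
    if not db.sp_columns(backup_name):
        logger.info('Creating {}'.format(backup_name))
        db.execute('select * into {} from {}'.format(backup_name, table_name))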
Example #11
def upload_excel_to_tempdb(db, excel_file, sheet_name_or_idx):
    def get_data_from_excel():
        reader = ExcelReader(excel_file)
        rows = reader.get_data_from_sheet(sheet_name_or_idx)
        return rows, reader.create_qry
    tt_name = TempTable.create_from_data(db, *get_data_from_excel())

    # Normalise the [date] column of the imported temp table to a proper SQL date type
    date_value = db.get_one_value('select top 1 [date] from {}'.format(tt_name))

    if not isinstance(date_value, datetime.datetime):
        if isinstance(date_value, str):
            logger.info('Convert varchar to datetime for [date] column')

            # rewrite [date] as yyyy-mm-dd before converting to the date type
            db.execute('''
                update {}
                set [date]=right([date],4)+'-'+SUBSTRING([date], 4, 2) + '-' + left([date],2)
            '''.format(tt_name))
        elif isinstance(date_value, float):
            logger.info('Convert float to datetime for [date] column')
            # SQL Server counts dates from 1900-01-01; Excel serials start at 1899-12-30, 2 days earlier
            # widen the column to varchar, then cast (serial - 2) to datetime before the final alter to date
            db.execute('''
                alter table {tt_name}
                alter column [date] varchar(20)
            '''.format(tt_name=tt_name))

            db.execute('''
                update {tt_name}
                set date=cast(date - 2 as datetime)
            '''.format(tt_name=tt_name))

        db.execute('''
            alter table {}
            alter column [date] date
        '''.format(tt_name))
    return tt_name
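The varchar branch rewrites dd/mm/yyyy strings into ISO yyyy-mm-dd before the alter column, because SQL Server parses the ISO form unambiguously regardless of language settings. The same slicing, translated to Python (T-SQL substring is 1-based):

def ddmmyyyy_to_iso(value):
    # right(v,4) + '-' + substring(v,4,2) + '-' + left(v,2)
    return value[-4:] + '-' + value[3:5] + '-' + value[:2]

assert ddmmyyyy_to_iso('31/01/2020') == '2020-01-31'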
Example #12
    header = 'ClientID,ModuleID,ModuleName'
    write_data(r'C:\Users\Lmai\Documents\Workspaces\users_subscription_20150917.csv', data, header)

    # get client advisor codes
    advisor_codes = db.get_data(qry_get_broker_advisor_codes(clientids))
    advisor_codes = [list(zip(repeat(code[0]), code[1].split(','))) for code in advisor_codes]
    print(advisor_codes)
    advisor_codes = list(chain(*advisor_codes))
    if advisor_codes:
        create_qry = '''
        create table {table_name} (
            clientid int,
            advisercode varchar(10) collate Latin1_General_CI_AS
        )
        '''
        tt_name = TempTable.create_from_data(octopus_db, advisor_codes, create_qry)

        rows = octopus_db.get_data(qry_get_advcode(tt_name))

        orig_clientids = {x for x in clientids}
        out_clientids = {row[0] for row in rows}
        print("Below users do not have advcode")
        print(orig_clientids - out_clientids)
        with open(r'C:\Users\Lmai\Documents\Workspaces\test_clientadvcodes.csv', 'w') as f:
            csvwriter = csv.writer(f, lineterminator='\n')
            csvwriter.writerow(['userid', 'advcode'])
            csvwriter.writerows(rows)

    clientmoduleids_lookup = defaultdict(list)

    rows = []
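The advisor_codes reshaping above turns each (clientid, 'A,B,C') row into one (clientid, code) pair per code: zip against itertools.repeat pairs the id with every code, and chain flattens the per-client lists. In isolation:

from itertools import chain, repeat

rows = [(1, 'A,B'), (2, 'C')]
pairs = [list(zip(repeat(cid), codes.split(','))) for cid, codes in rows]
flat = list(chain(*pairs))
assert flat == [(1, 'A'), (1, 'B'), (2, 'C')]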
Example #13
def add(db, from_report_id, to_report_id, report_type):
    table_name = 'tbl{report_type}'.format(report_type=report_type)
    helper.backup_table(db, table_name)
    if from_report_id != to_report_id:
        # e.g. move existing fund reports from report 1 to report 35
        direct_property_sector_tt_name = TempTable.create_from_query(db, qry_get_direct_property_sector())
        report_tt_name = TempTable.create_from_query(db, qry_get_existing_fund_update(table_name, from_report_id,
                                                                                  direct_property_sector_tt_name))

        data = db.get_data('''
        select tt.*, isnull(f.APIRCode, f.instrumentid) as newReportURL
        from {tt_name} tt
        left join vewISF_Fund f on tt.fundid = f.fundid and f.IsMainTaxStructure=1
        '''.format(tt_name=report_tt_name))

        assert len(report_tt_name) == len(data)
        ok = []
        errors = []
        no_recommd = []
        #data = data[:1]
        for row in data:
            fund_id = row.FundID
            parent_fund_id = get_parent_id(db, fund_id)
            report_url = get_report_url(db, parent_fund_id)
            logger.info('{} has parent ID of {}'.format(fund_id, parent_fund_id))
            if report_url is None:
                logger.info('{} {} has no main tax structure'.format('-'*80, parent_fund_id))
                errors += [fund_id]
            else:
                urllink = 'https://reports.lonsec.com.au/FV/{}'.format(report_url)
                logger.info('Checking {} exists'.format(urllink))
                is_link_exists = link_exists(urllink) if fund_id in [9356,8416,8432,14703,12883,548,15933,16086] else True
                if is_link_exists:
                    logger.info('----> OK')
                    ok += [fund_id]
                    #Lonsec..prcFundReportPut @fundid=4912, @reportid=33, @reportURL='YOC0100AU',@IsActive=1
                    # ,@AnalystID=56526, @AuthoriserID=56036
                    count = add_report(db, fund_id, to_report_id, report_url, analyst_id=56526, authoriser_id=56036)
                    logger.info('{} inserted'.format(count))
                elif not does_fund_has_recom(db, fund_id):
                    no_recommd += [fund_id]
                else:
                    logger.info('----> REPORT MISSING')
                    errors += [fund_id]

        logger.info('GOOD: {}'.format(len(ok)))
        logger.info('ERROR: {}'.format(len(errors)))
        logger.info('---> {}'.format(','.join(map(str, errors))))
        logger.info('NO RECOMMENDATIONS OR SCREENOUT: {}'.format(len(no_recommd)))
        logger.info('---> {}'.format(','.join(map(str, no_recommd))))

        assert len(ok) + len(errors) + len(no_recommd) == len(data)

        # mark all fund_ids under from_report_id as inactive
        all_fund_ids = ok + errors + no_recommd

        count = db.execute(qry_deactive_old_report(from_report_id, all_fund_ids))

        logger.info('{} rows marked inactive'.format(count))

        # due to trigger, count may not return correct value
        # ignore for now
        #assert count == len(data)

    elif from_report_id == 24:   # ETF Viewpoint

        # investmentreport, just change to stock code
        data = db.get_data(qry_get_existing_investment_viewpoints(table_name, from_report_id))
        data = [(row.InvestmentID, row.StockCode) for row in data]
        errors = []
        ok = []
        no_reports = []
        for investment_id, stock_code in data:
            if stock_code:
                urllink = 'https://reports.lonsec.com.au/FV/{}'.format(stock_code)
                logger.info('Checking {} exists'.format(urllink))

                is_link_exists = link_exists(urllink)
                if is_link_exists:
                    logger.info('----> OK')
                    count = db.execute(qry_update_investment_viewpoint(investment_id, stock_code, to_report_id))
                    logger.info('{} updated'.format(count))
                    ok += [investment_id]
                else:
                    logger.info('----> REPORT MISSING')
                    no_reports += [investment_id]
            else:
                errors += [investment_id]

        logger.info('GOOD: {}'.format(len(ok)))
        logger.info('NO REPORTS: {}'.format(len(no_reports)))
        logger.info('---> {}'.format(','.join(map(str, no_reports))))
        logger.info('ERROR: {} (No stock codes)'.format(len(errors)))
        logger.info('---> {}'.format(','.join(map(str, errors))))

        assert len(ok) + len(errors) + len(no_reports) == len(data)

        # deactivate the invalid investment_ids under from_report_id
        all_investment_ids = errors + no_reports

        count = db.execute(qry_deactive_invalid_investment_report(from_report_id, all_investment_ids))
        logger.info('{} rows marked inactive'.format(count))

    elif from_report_id == 11:  # fund profile
        data = db.get_data(qry_get_existing_fund_profile(table_name, from_report_id))
        data = [(row.InvestmentID, row.StockCode) for row in data]
        errors = []
        ok = []
        no_reports = []
        for investment_id, stock_code in data:
            if stock_code:
                urllink = 'https://reports.lonsec.com.au/FP/{}'.format(stock_code)
                logger.info('Checking {} exists'.format(urllink))
                #is_link_exists = link_exists(urllink)
                is_link_exists = True
                if is_link_exists:
                    logger.info('----> OK')
                    count = db.execute(qry_update_investment_viewpoint(investment_id, stock_code, to_report_id))
                    logger.info('{} updated'.format(count))
                    ok += [investment_id]
                else:
                    logger.info('----> REPORT MISSING')
                    no_reports += [investment_id]
            else:
                errors += [investment_id]

        logger.info('GOOD: {}'.format(len(ok)))
        logger.info('NO REPORTS: {}'.format(len(no_reports)))
        logger.info('---> {}'.format(','.join(map(str, no_reports))))
        logger.info('ERROR: {} (No stock codes)'.format(len(errors)))
        logger.info('---> {}'.format(','.join(map(str, errors))))

        assert len(ok) + len(errors) + len(no_reports) == len(data)

        # deactivate the invalid investment_ids under from_report_id
        all_investment_ids = errors + no_reports
        if all_investment_ids:
            count = db.execute(qry_deactive_invalid_investment_report(from_report_id, all_investment_ids))
            logger.info('{} rows marked inactive'.format(count))
    else:
        raise NotImplementedError('report migration for from_report_id={} is not implemented'.format(from_report_id))
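link_exists is never shown. A plausible sketch using requests, treating a 200 after redirects as "the report exists"; the HEAD request and status handling are assumptions about the real helper:

import requests

def link_exists(url):
    # HEAD avoids downloading the report body; the real helper may need auth or retries
    try:
        response = requests.head(url, allow_redirects=True, timeout=10)
        return response.status_code == 200
    except requests.RequestException:
        return False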