Example #1
def double_check(db):
    report_id = 24

    # Back up the report table and remove any existing rows for this report before re-adding them
    helper.backup_table(db, 'tblInvestmentReport')
    remove_report(db, report_id)

    data = db.get_data(qry_get_all_recommendation_funds())
    # Collect one-column rows of fund IDs that already have a report URL
    fund_ids = []
    for row in data:
        fund_id = row[1]
        report_url = row[2]
        if report_url:
            fund_ids.append([fund_id])
            #logger.info('{}\t{}'.format(fund_id, report_url))
            #count = add_report(db, fund_id, report_id, report_url, analyst_id=56526, authoriser_id=56036)
            #logger.info('Adding report for {} - {}'.format(fund_id, count))
    create_table_query = '''
    create table {table_name} (
    id int
    )
    '''
    tt_name = TempTable.create_from_data(db, fund_ids, create_table_query)
    # Of the collected funds, find those without an active report row and pair each with its equity stockCode
    data = db.get_data('''
    select tt.ID, ve.stockCode as reportURL
    from {tt_name} tt
    left join vewEquities ve on tt.ID = ve.StockID
    left join tblInvestmentReport ir on tt.ID = ir.InvestmentID and ir.reportID={report_id} and ir.isActive=1
    where ir.reportURL is null
    '''.format(tt_name=tt_name, report_id=report_id))
    for row in data:
        fund_id = row[0]
        report_url = row[1]
        count = add_report(db, fund_id, report_id, report_url, analyst_id=56526, authoriser_id=56036)
        logger.info('Adding report for {} - {}'.format(fund_id, count))
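Every example on this page funnels its rows through TempTable.create_from_data(db, rows, create_qry). The real helper is not shown here, but a rough, hypothetical sketch of the contract the callers rely on could look like the following (the #temp naming scheme and the insert loop are assumptions, not the actual implementation):

# Hypothetical sketch only - illustrates the contract the examples rely on, not the real helper.
import uuid

class TempTable:
    @staticmethod
    def create_from_data(db, rows, create_qry):
        # Assumed naming scheme: a session-scoped #temp table with a unique suffix.
        table_name = '#tt_{}'.format(uuid.uuid4().hex[:8])
        # The caller's CREATE TABLE template is expected to contain a {table_name} placeholder.
        db.execute(create_qry.format(table_name=table_name))
        # Assumed row-by-row insert, one parameter marker per column in the first row.
        placeholders = ', '.join('?' * len(rows[0]))
        for row in rows:
            db.execute('insert into {} values ({})'.format(table_name, placeholders), *row)
        return table_name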
Example #2
def get_recommendation(db, etf_stock_list, output):
    csvreader = csv.reader(etf_stock_list)
    stocks = list(csvreader)  # each row is a one-column list holding a stock code

    create_table_qry = """
    create table {table_name} (
        stockCode varchar(10) COLLATE SQL_Latin1_General_CP1_CI_AS
    )
    """
    tt_name = TempTable.create_from_data(db, stocks, create_table_qry)
    logger.info(tt_name)

    rows = db.get_data(qry_get_recommendation(tt_name))

    csvwriter = csv.writer(output, lineterminator="\n")
    csvwriter.writerow(["StockCode", "Recommendation", "DateFrom"])
    csvwriter.writerows(rows)
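A minimal invocation sketch for the example above, assuming the ETF list is a one-column CSV of stock codes and that connect() (hypothetical, not part of the original code) returns the same db wrapper used throughout:

# Hypothetical usage; connect() is an assumed factory for the db wrapper used in these examples.
db = connect()
with open('etf_stocks.csv', newline='') as etf_stock_list, \
        open('recommendations.csv', 'w', newline='') as output:
    get_recommendation(db, etf_stock_list, output)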
Example #3
def upload_excel_to_tempdb(db, excel_file, sheet_name_or_idx):
    def get_data_from_excel():
        reader = ExcelReader(excel_file)
        rows = reader.get_data_from_sheet(sheet_name_or_idx)
        return rows, reader.create_qry
    tt_name = TempTable.create_from_data(db, *get_data_from_excel())

    try:
        # Normalise the [date] column to a proper DATE type if it was imported as text or as an Excel serial number
        date_value = db.get_one_value('select top 1 [date] from {}'.format(tt_name))

        if not isinstance(date_value, datetime.datetime):
            if isinstance(date_value, str):
                logger.info('Convert varchar to datetime for [date] column')

                # reformat [date] from day-first (dd/mm/yyyy) text to yyyy-mm-dd before the column is converted to DATE below
                db.execute('''
                    update {}
                    set [date]=right([date],4)+'-'+SUBSTRING([date], 4, 2) + '-' + left([date],2)
                '''.format(tt_name))
            elif isinstance(date_value, float):
                logger.info('Convert float to datetime for [date] column')
                # SQL Server counts its dates from 1900-01-01 while Excel's serials effectively start at 1899-12-30, so the serial is 2 days ahead.
                # Widen the column to text, shift the serial by 2 days, then convert the column to DATE below.
                db.execute('''
                    alter table {tt_name}
                    alter column [date] varchar(20)
                '''.format(tt_name=tt_name))

                db.execute('''
                    update {tt_name}
                    set date=cast(date - 2 as datetime)
                '''.format(tt_name=tt_name))

            db.execute('''
                alter table {}
                alter column [date] date
            '''.format(tt_name))
    except pyodbc.ProgrammingError:
        logger.info("{tt_name} does not have date column".format(tt_name=tt_name))

    return tt_name
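A small worked check of the two-day offset mentioned in the comment above: Excel's 1900 date system stores dates as serial numbers (45292 is 2024-01-01), while SQL Server's datetime counts days from 1900-01-01, one day apart from Excel's effective epoch plus one more for Excel's phantom 1900-02-29:

import datetime

# Excel serial 45292 = 2024-01-01 in the 1900 date system.
# SQL Server datetime day 0 = 1900-01-01, so subtract 2 to line the two systems up.
sql_epoch = datetime.datetime(1900, 1, 1)
print(sql_epoch + datetime.timedelta(days=45292 - 2))  # -> 2024-01-01 00:00:00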
Example #4
def process(db, excel_file, dry_run):
    # Import excel file into temp table
    reader = ExcelReader(excel_file)
    rows = reader.get_data_from_sheet(0)
    tt_name = TempTable.create_from_data(db, rows, reader.create_qry)

    # Extract records in [ExternalData].dbo.tblGrowthSeries that match the InvestmentIDs in the temp table imported above
    count = db.execute(import_growth_series_query(tt_name))

    assert len(rows) == count  # sanity check: one affected record per imported Excel row

    # back up tblInvestmentGrowthSeries
    today = datetime.date.today().strftime('%Y%m%d')

    if not db.sp_columns('tblInvestmentGrowthSeries_{date}'.format(date=today)):
        # if table does not exist
        logger.info('Creating tblInvestmentGrowthSeries_{date}'.format(date=today))
        db.execute('select * into tblInvestmentGrowthSeries_{date} from tblInvestmentGrowthSeries'.format(date=today))

    # get an investment id from [ExternalData].dbo.ImportGrowthSeries for a before/after spot check
    investment_id = db.get_one_value('select top 1 code from [ExternalData].dbo.ImportGrowthSeries')
    logger.info(investment_id)
    logger.info('Before updating')
    logger.info(db.get_data('select top 1 * from tblInvestmentGrowthSeries where investmentid=? order by [date] desc'
                            , investment_id))

    count = db.execute(merge_to_tblInvestmentGrowthSeries_query())
    logger.info("{} records updated in tblInvestmentGrowthSeries".format(count))
    logger.info('After updating')
    logger.info(db.get_data('select top 1 * from tblInvestmentGrowthSeries where investmentid=? order by [date] desc'
                            , investment_id))

    if not dry_run:
        logger.info('Commit changes')
        db.commit()
    else:
        logger.info('Dry run: changes were not committed')
Example #5
def upload_excel_to_tempdb(db, excel_file, sheet_name_or_idx):
    def get_data_from_excel():
        reader = ExcelReader(excel_file)
        rows = reader.get_data_from_sheet(sheet_name_or_idx)
        return rows, reader.create_qry
    tt_name = TempTable.create_from_data(db, *get_data_from_excel())

    # Normalise the [date] column to a proper DATE type if it was imported as text or as an Excel serial number
    date_value = db.get_one_value('select top 1 [date] from {}'.format(tt_name))

    if not isinstance(date_value, datetime.datetime):
        if isinstance(date_value, str):
            logger.info('Convert varchar to datetime for [date] column')

            # reformat [date] from day-first (dd/mm/yyyy) text to yyyy-mm-dd before the column is converted to DATE below
            db.execute('''
                update {}
                set [date]=right([date],4)+'-'+SUBSTRING([date], 4, 2) + '-' + left([date],2)
            '''.format(tt_name))
        elif isinstance(date_value, float):
            logger.info('Convert float to datetime for [date] column')
            # SQL Server counts its dates from 1900-01-01 while Excel's serials effectively start at 1899-12-30, so the serial is 2 days ahead.
            # Widen the column to text, shift the serial by 2 days, then convert the column to DATE below.
            db.execute('''
                alter table {tt_name}
                alter column [date] varchar(20)
            '''.format(tt_name=tt_name))

            db.execute('''
                update {tt_name}
                set date=cast(date - 2 as datetime)
            '''.format(tt_name=tt_name))

        db.execute('''
            alter table {}
            alter column [date] date
        '''.format(tt_name))
    return tt_name
Example #6
    header = 'ClientID,ModuleID,ModuleName'
    write_data(r'C:\Users\Lmai\Documents\Workspaces\users_subscription_20150917.csv', data, header)

    # get client advisor codes
    advisor_codes = db.get_data(qry_get_broker_advisor_codes(clientids))
    advisor_codes = [list(zip(repeat(code[0]), code[1].split(','))) for code in advisor_codes]
    print(advisor_codes)
    advisor_codes = list(chain(*advisor_codes))
    if advisor_codes:
        create_qry = '''
        create table {table_name} (
            clientid int,
            advisercode varchar(10) collate Latin1_General_CI_AS
        )
        '''
        tt_name = TempTable.create_from_data(octopus_db, advisor_codes, create_qry)

        rows = octopus_db.get_data(qry_get_advcode(tt_name))

        orig_clientids = set(clientids)
        out_clientids = {row[0] for row in rows}
        print("The following users do not have an advcode:")
        print(orig_clientids - out_clientids)
        with open(r'C:\Users\Lmai\Documents\Workspaces\test_clientadvcodes.csv', 'w') as f:
            csvwriter = csv.writer(f, lineterminator='\n')
            csvwriter.writerow(['userid', 'advcode'])
            csvwriter.writerows(rows)

    clientmoduleids_lookup = defaultdict(list)

    rows = []