Example #1
0
 def refreshFundInfo(self, fund_universe, add_managers=None, include_indices=False):
     """Refresh DMRR / IL / Signal info for a fund universe, plus optional
     extra managers and the MCP funds-and-indices list.

     fund_universe   -- "PeerGroup" to skip the universe report, otherwise
                        1 (investment), 2 (focus) or 3 (investment+focus),
                        selecting the restriction file to use
     add_managers    -- optional list of manager backstop ids to refresh too
     include_indices -- when True, also rebuilds self.MCPFAI_funds
     """
     # Avoid the shared-mutable-default pitfall: a literal [] default is
     # created once and reused across every call.
     if add_managers is None:
         add_managers = []

     t = date.today()
     cur_date = date(t.year, t.month, 15)
     get = BackStopDataManager()
     stats_cache_updater = StatsCacheUpdater(self.cache_path)

     # Report definition is shared by the universe and add_managers runs.
     file_def = open("def/refresh_fund_def.dat")
     try:
         report_def = file_def.read()
     finally:
         file_def.close()

     def _apply_row(data):
         # Common post-processing for one report row: parse the DMRR,
         # copy IL/Signal, then let the stats cache supply the actual DMRR.
         fund = self.fund_index[data[0][1]]
         fund.DMRR = datetime.strptime(data[1][1], "%m/%d/%Y")
         fund.IL = data[2][1]
         fund.Signal = data[3][1]
         fund.DMRR = stats_cache_updater.getActualDMRR(data[0][1], fund.DMRR)

     if fund_universe != "PeerGroup":
         # Map each universe code to its restriction file.
         res_paths = {1: "def/refresh_inv_res.dat",
                      2: "def/refresh_focus_res.dat",
                      3: "def/refresh_inv_focus_res.dat"}
         file_res = open(res_paths[fund_universe])
         try:
             report_res = file_res.read()
         finally:
             file_res.close()
         info = get.runFundsReport(report_def, report_res, cur_date)
         for data in info:
             _apply_row(data)

     if len(add_managers) > 0:
         # Build a union filter over the manager ids.  NOTE: the original
         # joined the clauses with "&&", but report.field1 can never equal
         # two different ids at once, so the query matched nothing for more
         # than one manager -- "||" is the intended union.
         report_res = """${(report.field1 == """ + str(add_managers[0]) + """)"""
         for id in add_managers[1:]:
             report_res += """ || (report.field1 == """ + str(id) + """)"""
         report_res += """}"""
         info = get.runFundsReport(report_def, report_res, cur_date)
         for data in info:
             _apply_row(data)

     if include_indices:
         self.MCPFAI_funds = []
         # These definition/restriction handles were previously leaked.
         file_def = open("def/MCPFAI_def.dat")
         file_res = open("def/MCPFAI_res.dat")
         try:
             report_def = file_def.read()
             report_res = file_res.read()
         finally:
             file_def.close()
             file_res.close()
         info = get.runFundsReport(report_def, report_res, cur_date)
         for data in info:
             # Index rows have a different layout (id in column 1).
             fund = self.fund_index[data[1][1]]
             fund.DMRR = datetime.strptime(data[2][1], "%m/%d/%Y")
             fund.IL = "MCP Fund or Index"
             fund.Class = "MCP Fund or Index"
             fund.Signal = data[4][1]
             fund.DMRR = stats_cache_updater.getActualDMRR(data[1][1], fund.DMRR)
             self.MCPFAI_funds.append(fund)
Example #2
0
class MISCacheUpdater:
    def __init__(self, cache_path, pg=None):
        self.bsdm = BackStopDataManager()
        self.cache_path = cache_path
        self.pg = pg

        if self.cache_path[len(self.cache_path) - 1] != "/":
            self.cache_path += "/"

        self.checkFolders()
        self.loaded = [
            False, False, False, False, False, False, False, False, False,
            False, False
        ]

        self.funds = {}
        self.firms = {}
        self.products = {}

    def checkFolders(self):
        cp = self.cache_path
        if not os.access(cp + "reports", os.F_OK):
            os.makedirs(cp + "reports")
        if not os.access(cp + "firms", os.F_OK):
            os.makedirs(cp + "firms")
        if not os.access(cp + "funds", os.F_OK):
            os.makedirs(cp + "funds")
        if not os.access(cp + "products", os.F_OK):
            os.makedirs(cp + "products")

    def clearCache(self):
        cp = self.cache_path
        if os.access(cp, os.F_OK):
            shutil.rmtree(cp)
        self.checkFolders()

    def syncCache(self, source_path):
        s_time = time.clock()
        self.clearCache()
        cmd_str = "xcopy \"" + source_path + "\" \"" + self.cache_path[
            0:len(self.cache_path) - 1] + "\" /e /i /h /R /Y"
        os.system(cmd_str)
        print "Copy took", (time.clock() - s_time), "secs"

    def update(self, as_of_date=date.today()):
        self.clearCache()

        self.loadFundData(as_of_date)
        self.loadProductData(as_of_date)
        self.loadContactsData(as_of_date)
        self.loadTransactionData(as_of_date)
        self.loadInternalTransactionData(as_of_date)
        self.loadMeetingsData(as_of_date)
        self.loadExposureData(as_of_date)
        self.loadHoldingsData(as_of_date)
        self.loadAumData(as_of_date)
        self.loadReturnsData(as_of_date)
        self.loadStatisticsData(as_of_date)

    def loadFundData(self, as_of_date, load_from_cache=False):
        if self.pg != None:
            self.pg.startSubProcess(self.pg.cur_sub_proc_total, 2.0)

        cp = self.cache_path
        if not load_from_cache:
            f_def = open("def/fund_def.dat")
            f_res = open("def/fund_res.dat")

            if self.pg != None:
                self.pg.addMessage("Downloading Fund Data")
                self.pg.incSub("running funds report.....")

            fund_data = self.bsdm.runFundsReport(f_def.read(), f_res.read(),
                                                 as_of_date)

            if self.pg != None:
                self.pg.incSub("finished funds report.....")

            rep_file = open(cp + "reports/fund_rep.cache", 'w')
            cPickle.dump(fund_data, rep_file)
            rep_file.close()
        else:
            rep_file = open(cp + "reports/fund_rep.cache")
            fund_data = cPickle.load(rep_file)
            rep_file.close()

        for f in fund_data:
            tmp_fund = Fund()
            tmp_fund.backstop_id = int(f[0][1])
            tmp_fund.name = f[1][1]
            tmp_fund.first_investment_date = datetime.strptime(
                f[6][1], "%m/%d/%Y")
            tmp_fund.incept_date = datetime.strptime(f[3][1], "%m/%d/%Y")
            if int(f[30][1]) not in self.firms.keys():
                tmp_fund.firm = Firm()
                tmp_fund.firm.name = f[31][1]
                tmp_fund.firm.backstop_id = int(f[30][1])
                self.firms[int(f[30][1])] = tmp_fund.firm
            self.funds[tmp_fund.backstop_id] = tmp_fund

        self.loaded[0] = True

    def loadProductData(self, as_of_date, load_from_cache=False):
        if self.pg != None:
            self.pg.startSubProcess(self.pg.cur_sub_proc_total * .4, 2.0)

        cp = self.cache_path
        if not load_from_cache:
            f_def = open("def/product_def.dat")
            f_res = open("def/product_res.dat")

            if self.pg != None:
                self.pg.addMessage("Downloading Product Data")
                self.pg.incSub("running products report.....")

            product_rep = self.bsdm.runProductsReport(f_def.read(),
                                                      f_res.read(), as_of_date)
            product_bal = {}

            if self.pg != None:
                self.pg.incSub("finished products report.....")
                self.pg.startSubProcess(self.pg.cur_sub_proc_total * .6,
                                        len(product_rep))

            for p in product_rep:
                if self.pg != None:
                    self.pg.incSub("getting " + p[1][1] + " data")
                bal = self.bsdm.getProductBalances(int(p[0][1]), date.today(),
                                                   date.today())
                product_bal[int(p[0][1])] = float(bal[0][1])

            product_data = [product_rep, product_bal]

            shutil.rmtree(cp + "products/")
            os.mkdir(cp + "products/")

            rep_file = open(cp + "reports/product_rep.cache", 'w')
            cPickle.dump(product_data, rep_file)
            rep_file.close()
        else:
            rep_file = open(cp + "reports/product_rep.cache")
            product_data = cPickle.load(rep_file)
            product_bal = product_data[1]
            product_rep = product_data[0]
            rep_file.close()

        for p in product_rep:
            if int(p[0][1]) not in self.products.keys():
                tmp_product = Product()
                tmp_product.backstop_id = int(p[0][1])
                tmp_product.name = p[1][1]
                tmp_product.balance = product_bal[tmp_product.backstop_id]
                if tmp_product.balance > 0:
                    self.products[int(p[0][1])] = tmp_product
        self.loaded[1] = True

    def loadContactsData(self, as_of_date):
        if not self.loaded[0]:
            self.loadFundData(as_of_date, True)
        if not self.loaded[1]:
            self.loadProductData(as_of_date, True)

        if self.pg != None:
            self.pg.startSubProcess(self.pg.cur_sub_proc_total, 2.0)
            self.pg.addMessage("Downloading Contacts Data.....")
            self.pg.incSub("running people/orgs report.....")

        cp = self.cache_path
        f_def = open("def/people_orgs_def.dat")
        f_res = open("def/people_orgs_res.dat")
        contact_data = self.bsdm.runPeopleOrgsReport(f_def.read(),
                                                     f_res.read(), as_of_date)

        if self.pg != None:
            self.pg.incSub("finished people/orgs report.....")

        rep_file = open(cp + "reports/people_org_rep.cache", 'w')
        cPickle.dump(contact_data, rep_file)
        rep_file.close()
        self.loaded[2] = True

    def loadTransactionData(self, as_of_date):
        if not self.loaded[0]:
            self.loadFundData(as_of_date, True)
        if not self.loaded[1]:
            self.loadProductData(as_of_date, True)

        if self.pg != None:
            self.pg.startSubProcess(self.pg.cur_sub_proc_total, 2.0)
            self.pg.addMessage("Downloading Transaction Data.....")
            self.pg.incSub("running transaction report.....")

        cp = self.cache_path
        f_def = open("def/port_trans_def.dat")
        f_res = open("def/port_trans_res.dat")
        trans_data = self.bsdm.runPortfolioTransactionsReport(
            f_def.read(), f_res.read(), as_of_date)

        if self.pg != None:
            self.pg.incSub("finished transaction report.....")

        rep_file = open(cp + "reports/trans_rep.cache", 'w')
        cPickle.dump(trans_data, rep_file)
        rep_file.close()
        self.loaded[3] = True

    def loadInternalTransactionData(self, as_of_date, product_name="None"):
        if not self.loaded[0]:
            self.loadFundData(as_of_date, True)
        if not self.loaded[1]:
            self.loadProductData(as_of_date, True)

        if self.pg != None:
            self.pg.startSubProcess(self.pg.cur_sub_proc_total,
                                    len(self.products.keys()))

        cp = self.cache_path
        for p in self.products.keys():
            if self.pg != None:
                self.pg.incSub("getting holdings for " + self.products[p].name)
            if product_name == self.products[p].name or product_name == "None":
                prod_path = cp + "products/" + self.products[p].name
                if not os.access(prod_path + "/", os.F_OK):
                    os.mkdir(prod_path)
                trans_data = {}
                holding_ids = self.bsdm.getHoldingsInProduct(p)
                for h_id in holding_ids:
                    tmp = self.bsdm.getHoldingTransactions(
                        h_id, date(1994, 1, 1), as_of_date)
                    info = self.bsdm.getHoldingInformation(h_id)
                    trans_data[h_id] = [tmp, info]

                rep_file = open(prod_path + "/transactions.cache", 'w')
                cPickle.dump(trans_data, rep_file)
                rep_file.close()
        self.loaded[4] = True

    def loadMeetingsData(self, as_of_date, firm_name="None"):
        if not self.loaded[0]:
            self.loadFundData(as_of_date, True)
        if not self.loaded[1]:
            self.loadProductData(as_of_date, True)

        if self.pg != None:
            self.pg.startSubProcess(self.pg.cur_sub_proc_total,
                                    len(self.firms.keys()))
            self.pg.addMessage("Downloading Meetings Data.....")

        cp = self.cache_path
        for f in self.firms.keys():
            if self.pg != None:
                self.pg.incSub("getting meeting info for " +
                               self.firms[f].name)
            if firm_name == self.firms[f].name or firm_name == "None":
                firm_path = cp + "firms/" + self.firms[f].name
                if not os.access(firm_path + "/", os.F_OK):
                    os.mkdir(firm_path)
                meeting_data = self.bsdm.getMeetingInfoByBackstopIdandType(
                    f, "Organization")

                rep_file = open(firm_path + "/meeting.cache", 'w')
                cPickle.dump(meeting_data, rep_file)
                rep_file.close()
        self.loaded[5] = True

    def loadExposureData(self, as_of_date, fund_name="None"):
        if not self.loaded[0]:
            self.loadFundData(as_of_date, True)
        if not self.loaded[1]:
            self.loadProductData(as_of_date, True)

        if self.pg != None:
            self.pg.startSubProcess(self.pg.cur_sub_proc_total,
                                    len(self.funds.keys()))
            self.pg.addMessage("Downloading Exposure Data.....")

        cp = self.cache_path
        for f in self.funds.keys():
            if self.pg != None:
                self.pg.incSub("getting exposure data for " +
                               self.funds[f].name)
            if fund_name == self.funds[f].name or fund_name == "None":
                fund_path = cp + "funds/" + self.funds[f].name
                if not os.access(fund_path + "/", os.F_OK):
                    os.mkdir(fund_path)

                exp_data = self.bsdm.getExposureData(
                    self.funds[f].backstop_id,
                    self.funds[f].first_investment_date, as_of_date)

                final_exp_data = []
                for cat in exp_data:
                    if cat[0] == "PORTFOLIO EXPOSURE" and cat[
                            1] == "Portfolio Exposure":
                        final_exp_data.append(cat[2])

                rep_file = open(fund_path + "/exposure_data.cache", 'w')
                cPickle.dump(final_exp_data, rep_file)
                rep_file.close()
        self.loaded[6] = True

    def loadHoldingsData(self, as_of_date, product_name="None"):
        if not self.loaded[0]:
            self.loadFundData(as_of_date, True)
        if not self.loaded[1]:
            self.loadProductData(as_of_date, True)

        if self.pg != None:
            self.pg.startSubProcess(self.pg.cur_sub_proc_total,
                                    len(self.products.keys()))
            self.pg.addMessage("Downloading Holdings Data.....")

        cp = self.cache_path
        for p in self.products.keys():
            if self.pg != None:
                self.pg.incSub("getting holdings data for " +
                               self.products[p].name)
            if product_name == self.products[p].name or product_name == "None":
                fund_path = cp + "products/" + self.products[p].name
                if not os.access(fund_path + "/", os.F_OK):
                    os.mkdir(fund_path)
                holding_data = {}

                holding_ids = self.bsdm.getHoldingIds(
                    self.products[p].backstop_id)
                for h_id in holding_ids:
                    info = self.bsdm.getHoldingInformation(h_id)
                    bal = self.bsdm.getHoldingBalances(h_id, as_of_date,
                                                       as_of_date)
                    holding_data[h_id] = [info, bal]

                rep_file = open(fund_path + "/holding_data.cache", 'w')
                cPickle.dump(holding_data, rep_file)
                rep_file.close()
        self.loaded[7] = True

    def loadAumData(self, as_of_date, fund_name="None"):
        if not self.loaded[0]:
            self.loadFundData(as_of_date, True)
        if not self.loaded[1]:
            self.loadProductData(as_of_date, True)

        if self.pg != None:
            self.pg.startSubProcess(self.pg.cur_sub_proc_total,
                                    len(self.funds.keys()))
            self.pg.addMessage("Downloading AUM Data.....")

        cp = self.cache_path
        for f in self.funds.keys():
            if self.pg != None:
                self.pg.incSub("getting aum data for " + self.funds[f].name)
            if fund_name == self.funds[f].name or fund_name == "None":
                fund_path = cp + "funds/" + self.funds[f].name
                if not os.access(fund_path + "/", os.F_OK):
                    os.mkdir(fund_path)
                aum_data = self.bsdm.getHedgeFundAums(
                    self.funds[f].backstop_id, date(1990, 1, 1), as_of_date)

                rep_file = open(fund_path + "/aum_data.cache", 'w')
                cPickle.dump(aum_data, rep_file)
                rep_file.close()
        self.loaded[8] = True

    def loadReturnsData(self, as_of_date, fund_name="None"):
        if not self.loaded[0]:
            self.loadFundData(as_of_date, True)
        if not self.loaded[1]:
            self.loadProductData(as_of_date, True)

        if self.pg != None:
            self.pg.startSubProcess(self.pg.cur_sub_proc_total,
                                    len(self.funds.keys()))
            self.pg.addMessage("Downloading Returns Data.....")

        cp = self.cache_path
        for f in self.funds.keys():
            if self.pg != None:
                self.pg.incSub("getting returns data for " +
                               self.funds[f].name)
            if fund_name == self.funds[f].name or fund_name == "None":
                fund_path = cp + "funds/" + self.funds[f].name
                if not os.access(fund_path + "/", os.F_OK):
                    os.mkdir(fund_path)
                return_data = self.bsdm.getHedgeFundReturns(
                    self.funds[f].backstop_id, self.funds[f].incept_date,
                    as_of_date)

                rep_file = open(fund_path + "/return_data.cache", 'w')
                cPickle.dump(return_data, rep_file)
                rep_file.close()
        self.loaded[9] = True

    def loadStatisticsData(self, as_of_date, fund_name="None"):
        if not self.loaded[0]:
            self.loadFundData(as_of_date, True)
        if not self.loaded[1]:
            self.loadProductData(as_of_date, True)

        if self.pg != None:
            self.pg.startSubProcess(self.pg.cur_sub_proc_total,
                                    len(self.funds.keys()))
            self.pg.addMessage("Downloading statistics Data.....")

        cp = self.cache_path
        for f in self.funds.keys():
            if self.pg != None:
                self.pg.incSub("getting statistics data for " +
                               self.funds[f].name)
            if fund_name == self.funds[f].name or fund_name == "None":
                fund = self.funds[f]
                fund_path = cp + "funds/" + self.funds[f].name
                if not os.access(fund_path + "/", os.F_OK):
                    os.mkdir(fund_path)
                stats = StatisticsCalculations(self.bsdm, f,
                                               fund.first_investment_date,
                                               as_of_date)
                stats_data = {}

                try:
                    stats_data["Compound Return"] = [stats.CompoundReturn()]
                except:
                    stats_data["Compound Return"] = ["N/A"]
                    pass
                try:
                    stats_data["Annualized Geometric Return"] = [
                        stats.AnnualizedGeometricReturn()
                    ]
                except:
                    stats_data["Annualized Geometric Return"] = ["N/A"]
                    pass
                try:
                    stats_data["Annualized Arithmetic Standard Deviation"] = [
                        stats.AnnualizedArithmeticStandardDeviation()
                    ]
                except:
                    stats_data["Annualized Arithmetic Standard Deviation"] = [
                        "N/A"
                    ]
                    pass
                try:
                    stats_data["Beta"] = [stats.Beta()]
                except:
                    stats_data["Beta"] = ["N/A"]
                    pass
                try:
                    stats_data["Alpha"] = [stats.AnnualizedAlpha()]
                except:
                    stats_data["Alpha"] = ["N/A"]
                    pass
                try:
                    stats_data["Correlation Coefficient"] = [
                        stats.CorrelationCoefficient()
                    ]
                except:
                    stats_data["Correlation Coefficient"] = ["N/A"]
                    pass
                try:
                    stats_data["RSquared"] = [stats.RSquared()]
                except:
                    stats_data["RSquared"] = ["N/A"]
                    pass
                try:
                    stats_data["Max Drawdown"] = [stats.MaxDrawDown()]
                except:
                    stats_data["Max Drawdown"] = ["N/A"]
                    pass
                try:
                    stats_data["Sharpe Ratio"] = [stats.SharpeRatio()]
                except:
                    stats_data["Sharpe Ratio"] = ["N/A"]
                    pass
                try:
                    stats_data["Annualized Up-Capture"] = [
                        stats.AnnualizedUpCapture()
                    ]
                except:
                    stats_data["Annualized Up-Capture"] = ["N/A"]
                    pass
                try:
                    stats_data["Annualized Down-Capture"] = [
                        stats.AnnualizedDownCapture()
                    ]
                except:
                    stats_data["Annualized Down-Capture"] = ["N/A"]
                    pass
                rep_file = open(fund_path + "/stats_data.cache", 'w')
                cPickle.dump(stats_data, rep_file)
                rep_file.close()
        self.loaded[10] = True
Example #3
0
class MISCacheUpdater:
    def __init__(self, cache_path, pg=None):
        """Create the data manager, normalise the cache path, and ensure
        the cache folder layout exists.  pg is an optional progress
        reporter."""
        self.bsdm = BackStopDataManager()
        self.cache_path = cache_path
        self.pg = pg

        # Guarantee a trailing slash so later path joins are simple appends.
        if self.cache_path[-1] != "/":
            self.cache_path += "/"

        self.checkFolders()
        # One pending-flag per load*Data step, in update() order.
        self.loaded = [False] * 11

        self.funds = {}
        self.firms = {}
        self.products = {}
    
    def checkFolders(self):
        """Create any of the four cache sub-folders that are missing."""
        root = self.cache_path
        for sub in ("reports", "firms", "funds", "products"):
            target = root + sub
            if not os.access(target, os.F_OK):
                os.makedirs(target)
        
    def clearCache(self):
        """Wipe the whole cache tree, then recreate the empty layout."""
        root = self.cache_path
        if os.access(root, os.F_OK):
            shutil.rmtree(root)
        self.checkFolders()
    
    def syncCache(self,source_path):
        s_time = time.clock()
        self.clearCache()
        cmd_str = "xcopy \"" + source_path + "\" \"" + self.cache_path[0:len(self.cache_path)-1] + "\" /e /i /h /R /Y"
        os.system(cmd_str)
        print "Copy took",(time.clock() - s_time),"secs"
        
    def update(self, as_of_date=None):
        """Rebuild the entire cache as of as_of_date.

        as_of_date defaults to *today at call time*.  The previous signature
        default of date.today() was evaluated only once, when the module was
        imported, so a long-running process kept using a stale date.
        """
        if as_of_date is None:
            as_of_date = date.today()
        self.clearCache()

        self.loadFundData(as_of_date)
        self.loadProductData(as_of_date)
        self.loadContactsData(as_of_date)
        self.loadTransactionData(as_of_date)
        self.loadInternalTransactionData(as_of_date)
        self.loadMeetingsData(as_of_date)
        self.loadExposureData(as_of_date)
        self.loadHoldingsData(as_of_date)
        self.loadAumData(as_of_date)
        self.loadReturnsData(as_of_date)
        self.loadStatisticsData(as_of_date)
        
    def loadFundData(self,as_of_date,load_from_cache=False):
        """Populate self.funds / self.firms from the funds report.

        load_from_cache=True reads the previously pickled report instead of
        hitting Backstop.
        """
        if self.pg != None:
            self.pg.startSubProcess(self.pg.cur_sub_proc_total,2.0)

        cp = self.cache_path
        if not load_from_cache:
            f_def = open("def/fund_def.dat")
            f_res = open("def/fund_res.dat")

            if self.pg != None:
                self.pg.addMessage("Downloading Fund Data")
                self.pg.incSub("running funds report.....")

            fund_data = self.bsdm.runFundsReport(f_def.read(),f_res.read(),as_of_date)
            # Close the definition handles -- they used to leak.
            f_def.close()
            f_res.close()

            if self.pg != None:
                self.pg.incSub("finished funds report.....")

            rep_file = open(cp + "reports/fund_rep.cache",'w')
            try:
                cPickle.dump(fund_data,rep_file)
            finally:
                rep_file.close()
        else:
            rep_file = open(cp + "reports/fund_rep.cache")
            try:
                fund_data = cPickle.load(rep_file)
            finally:
                rep_file.close()

        for f in fund_data:
            # Column layout: 0=id, 1=name, 3=inception, 6=first investment,
            # 30=firm id, 31=firm name (per report definition).
            tmp_fund = Fund()
            tmp_fund.backstop_id = int(f[0][1])
            tmp_fund.name = f[1][1]
            tmp_fund.first_investment_date = datetime.strptime(f[6][1],"%m/%d/%Y")
            tmp_fund.incept_date = datetime.strptime(f[3][1],"%m/%d/%Y")
            if int(f[30][1]) not in self.firms.keys():
                tmp_fund.firm = Firm()
                tmp_fund.firm.name = f[31][1]
                tmp_fund.firm.backstop_id = int(f[30][1])
                self.firms[int(f[30][1])] = tmp_fund.firm
            self.funds[tmp_fund.backstop_id] = tmp_fund

        self.loaded[0] = True
    
    def loadProductData(self,as_of_date,load_from_cache=False):
        """Populate self.products (only products with a positive balance).

        load_from_cache=True reads the previously pickled report instead of
        hitting Backstop.
        """
        if self.pg != None:
            self.pg.startSubProcess(self.pg.cur_sub_proc_total*.4,2.0)

        cp = self.cache_path
        if not load_from_cache:
            f_def = open("def/product_def.dat")
            f_res = open("def/product_res.dat")

            if self.pg != None:
                self.pg.addMessage("Downloading Product Data")
                self.pg.incSub("running products report.....")

            product_rep = self.bsdm.runProductsReport(f_def.read(),f_res.read(),as_of_date)
            # Close the definition handles -- they used to leak.
            f_def.close()
            f_res.close()
            product_bal = {}

            if self.pg != None:
                self.pg.incSub("finished products report.....")
                self.pg.startSubProcess(self.pg.cur_sub_proc_total*.6,len(product_rep))

            for p in product_rep:
                if self.pg != None:
                    self.pg.incSub("getting " + p[1][1] + " data")
                bal = self.bsdm.getProductBalances(int(p[0][1]),date.today(),date.today())
                product_bal[int(p[0][1])] = float(bal[0][1])

            product_data = [product_rep,product_bal]

            # Start the per-product cache from scratch.
            shutil.rmtree(cp + "products/")
            os.mkdir(cp + "products/")

            rep_file = open(cp + "reports/product_rep.cache",'w')
            try:
                cPickle.dump(product_data,rep_file)
            finally:
                rep_file.close()
        else:
            rep_file = open(cp + "reports/product_rep.cache")
            try:
                product_data = cPickle.load(rep_file)
            finally:
                rep_file.close()
            product_bal = product_data[1]
            product_rep = product_data[0]

        for p in product_rep:
            if int(p[0][1]) not in self.products.keys():
                tmp_product = Product()
                tmp_product.backstop_id = int(p[0][1])
                tmp_product.name = p[1][1]
                tmp_product.balance = product_bal[tmp_product.backstop_id]
                # Zero/negative-balance products are not cached.
                if tmp_product.balance > 0:
                    self.products[int(p[0][1])] = tmp_product
        self.loaded[1] = True

    def loadContactsData(self,as_of_date):
        """Download the people/orgs report and pickle it into the cache."""
        if not self.loaded[0]:
            self.loadFundData(as_of_date,True)
        if not self.loaded[1]:
            self.loadProductData(as_of_date,True)

        if self.pg != None:
            self.pg.startSubProcess(self.pg.cur_sub_proc_total,2.0)
            self.pg.addMessage("Downloading Contacts Data.....")
            self.pg.incSub("running people/orgs report.....")

        cp = self.cache_path
        f_def = open("def/people_orgs_def.dat")
        f_res = open("def/people_orgs_res.dat")
        contact_data = self.bsdm.runPeopleOrgsReport(f_def.read(),f_res.read(),as_of_date)
        # Close the definition handles -- they used to leak.
        f_def.close()
        f_res.close()

        if self.pg != None:
            self.pg.incSub("finished people/orgs report.....")

        rep_file = open(cp + "reports/people_org_rep.cache",'w')
        try:
            cPickle.dump(contact_data,rep_file)
        finally:
            rep_file.close()
        self.loaded[2] = True
    
    def loadTransactionData(self,as_of_date):
        """Download the portfolio transactions report and pickle it into
        the cache."""
        if not self.loaded[0]:
            self.loadFundData(as_of_date,True)
        if not self.loaded[1]:
            self.loadProductData(as_of_date,True)

        if self.pg != None:
            self.pg.startSubProcess(self.pg.cur_sub_proc_total,2.0)
            self.pg.addMessage("Downloading Transaction Data.....")
            self.pg.incSub("running transaction report.....")

        cp = self.cache_path
        f_def = open("def/port_trans_def.dat")
        f_res = open("def/port_trans_res.dat")
        trans_data = self.bsdm.runPortfolioTransactionsReport(f_def.read(),f_res.read(),as_of_date)
        # Close the definition handles -- they used to leak.
        f_def.close()
        f_res.close()

        if self.pg != None:
            self.pg.incSub("finished transaction report.....")

        rep_file = open(cp + "reports/trans_rep.cache",'w')
        try:
            cPickle.dump(trans_data,rep_file)
        finally:
            rep_file.close()
        self.loaded[3] = True
        
    def loadInternalTransactionData(self,as_of_date,product_name="None"):
        """Cache holding-level transactions per product.  With the default
        product_name of "None", every product is processed; otherwise only
        the matching one."""
        if not self.loaded[0]:
            self.loadFundData(as_of_date,True)
        if not self.loaded[1]:
            self.loadProductData(as_of_date,True)

        if self.pg != None:
            self.pg.startSubProcess(self.pg.cur_sub_proc_total,len(self.products.keys()))

        root = self.cache_path
        for prod_id in self.products.keys():
            product = self.products[prod_id]
            if self.pg != None:
                self.pg.incSub("getting holdings for " + product.name)
            if product_name == product.name or product_name == "None":
                prod_path = root + "products/" + product.name
                if not os.access(prod_path +"/",os.F_OK):
                    os.mkdir(prod_path)
                # Collect (transactions, info) pairs keyed by holding id.
                trans_data = {}
                for h_id in self.bsdm.getHoldingsInProduct(prod_id):
                    history = self.bsdm.getHoldingTransactions(h_id,date(1994,1,1),as_of_date)
                    details = self.bsdm.getHoldingInformation(h_id)
                    trans_data[h_id] = [history,details]

                rep_file = open(prod_path + "/transactions.cache",'w')
                cPickle.dump(trans_data,rep_file)
                rep_file.close()
        self.loaded[4] = True
                        
    def loadMeetingsData(self,as_of_date,firm_name="None"):
        """Cache meeting info per firm.  With the default firm_name of
        "None", every firm is processed; otherwise only the matching one."""
        if not self.loaded[0]:
            self.loadFundData(as_of_date,True)
        if not self.loaded[1]:
            self.loadProductData(as_of_date,True)

        if self.pg != None:
            self.pg.startSubProcess(self.pg.cur_sub_proc_total,len(self.firms.keys()))
            self.pg.addMessage("Downloading Meetings Data.....")

        root = self.cache_path
        for firm_id in self.firms.keys():
            firm = self.firms[firm_id]
            if self.pg != None:
                self.pg.incSub("getting meeting info for " + firm.name)
            if firm_name == firm.name or firm_name == "None":
                firm_path = root + "firms/" + firm.name
                if not os.access(firm_path +"/",os.F_OK):
                    os.mkdir(firm_path)
                meeting_data = self.bsdm.getMeetingInfoByBackstopIdandType(firm_id, "Organization")

                rep_file = open(firm_path + "/meeting.cache",'w')
                cPickle.dump(meeting_data,rep_file)
                rep_file.close()
        self.loaded[5] = True
                                
    def loadExposureData(self, as_of_date, fund_name="None"):
        """Pull portfolio-exposure series for each fund and cache them.

        Only the "PORTFOLIO EXPOSURE"/"Portfolio Exposure" category payloads
        are kept; the result goes to exposure_data.cache per fund folder.
        """
        if not self.loaded[0]:
            self.loadFundData(as_of_date, True)
        if not self.loaded[1]:
            self.loadProductData(as_of_date, True)

        if self.pg != None:
            self.pg.startSubProcess(self.pg.cur_sub_proc_total, len(self.funds.keys()))
            self.pg.addMessage("Downloading Exposure Data.....")

        base = self.cache_path
        for fund_id in self.funds.keys():
            fund = self.funds[fund_id]
            if self.pg != None:
                self.pg.incSub("getting exposure data for " + fund.name)
            if fund_name != fund.name and fund_name != "None":
                continue
            fund_dir = base + "funds/" + fund.name
            if not os.access(fund_dir + "/", os.F_OK):
                os.mkdir(fund_dir)

            exp_data = self.bsdm.getExposureData(fund.backstop_id, fund.first_investment_date, as_of_date)

            # Keep only the portfolio-exposure category payloads (cat[2]).
            final_exp_data = [cat[2] for cat in exp_data
                              if cat[0] == "PORTFOLIO EXPOSURE" and cat[1] == "Portfolio Exposure"]

            cache_file = open(fund_dir + "/exposure_data.cache", 'w')
            cPickle.dump(final_exp_data, cache_file)
            cache_file.close()
        self.loaded[6] = True

    def loadHoldingsData(self, as_of_date, product_name="None"):
        """Download holding info and balances per product and cache them.

        Each product folder receives holding_data.cache mapping
        holding id -> [information, balances-as-of-date].
        """
        if not self.loaded[0]:
            self.loadFundData(as_of_date, True)
        if not self.loaded[1]:
            self.loadProductData(as_of_date, True)

        if self.pg != None:
            self.pg.startSubProcess(self.pg.cur_sub_proc_total, len(self.products.keys()))
            self.pg.addMessage("Downloading Holdings Data.....")

        base = self.cache_path
        for prod_id in self.products.keys():
            prod = self.products[prod_id]
            if self.pg != None:
                self.pg.incSub("getting holdings data for " + prod.name)
            if product_name != prod.name and product_name != "None":
                continue
            prod_dir = base + "products/" + prod.name
            if not os.access(prod_dir + "/", os.F_OK):
                os.mkdir(prod_dir)
            holding_data = {}
            for h_id in self.bsdm.getHoldingIds(prod.backstop_id):
                info = self.bsdm.getHoldingInformation(h_id)
                bal = self.bsdm.getHoldingBalances(h_id, as_of_date, as_of_date)
                holding_data[h_id] = [info, bal]

            cache_file = open(prod_dir + "/holding_data.cache", 'w')
            cPickle.dump(holding_data, cache_file)
            cache_file.close()
        self.loaded[7] = True
            
    def loadAumData(self, as_of_date, fund_name="None"):
        """Fetch each fund's AUM history (since 1990) and cache it to
        aum_data.cache in the fund's cache folder."""
        if not self.loaded[0]:
            self.loadFundData(as_of_date, True)
        if not self.loaded[1]:
            self.loadProductData(as_of_date, True)

        if self.pg != None:
            self.pg.startSubProcess(self.pg.cur_sub_proc_total, len(self.funds.keys()))
            self.pg.addMessage("Downloading AUM Data.....")

        base = self.cache_path
        for fund_id in self.funds.keys():
            fund = self.funds[fund_id]
            if self.pg != None:
                self.pg.incSub("getting aum data for " + fund.name)
            if fund_name != fund.name and fund_name != "None":
                continue
            fund_dir = base + "funds/" + fund.name
            if not os.access(fund_dir + "/", os.F_OK):
                os.mkdir(fund_dir)
            aum_data = self.bsdm.getHedgeFundAums(fund.backstop_id, date(1990, 1, 1), as_of_date)

            cache_file = open(fund_dir + "/aum_data.cache", 'w')
            cPickle.dump(aum_data, cache_file)
            cache_file.close()
        self.loaded[8] = True
        
    def loadReturnsData(self, as_of_date, fund_name="None"):
        """Fetch each fund's returns from inception and cache them to
        return_data.cache in the fund's cache folder."""
        if not self.loaded[0]:
            self.loadFundData(as_of_date, True)
        if not self.loaded[1]:
            self.loadProductData(as_of_date, True)

        if self.pg != None:
            self.pg.startSubProcess(self.pg.cur_sub_proc_total, len(self.funds.keys()))
            self.pg.addMessage("Downloading Returns Data.....")

        base = self.cache_path
        for fund_id in self.funds.keys():
            fund = self.funds[fund_id]
            if self.pg != None:
                self.pg.incSub("getting returns data for " + fund.name)
            if fund_name != fund.name and fund_name != "None":
                continue
            fund_dir = base + "funds/" + fund.name
            if not os.access(fund_dir + "/", os.F_OK):
                os.mkdir(fund_dir)
            return_data = self.bsdm.getHedgeFundReturns(fund.backstop_id, fund.incept_date, as_of_date)

            cache_file = open(fund_dir + "/return_data.cache", 'w')
            cPickle.dump(return_data, cache_file)
            cache_file.close()
        self.loaded[9] = True
    
    def loadStatisticsData(self, as_of_date, fund_name="None"):
        """Compute summary statistics for each fund and cache them.

        For every fund (or only *fund_name* when given) a StatisticsCalculations
        helper is built over [first_investment_date, as_of_date] and a dict of
        statistic name -> [value] is pickled to stats_data.cache in the fund's
        cache folder.  A statistic that raises is recorded as ["N/A"] rather
        than aborting the fund — this preserves the original best-effort
        behavior, but with the eleven copy-pasted try/except blocks collapsed
        into one data-driven loop and the bare excepts narrowed to Exception.
        """
        if not self.loaded[0]:
            self.loadFundData(as_of_date, True)
        if not self.loaded[1]:
            self.loadProductData(as_of_date, True)

        if self.pg != None:
            self.pg.startSubProcess(self.pg.cur_sub_proc_total, len(self.funds.keys()))
            self.pg.addMessage("Downloading statistics Data.....")

        cp = self.cache_path
        for f in self.funds.keys():
            if self.pg != None:
                self.pg.incSub("getting statistics data for " + self.funds[f].name)
            if fund_name == self.funds[f].name or fund_name == "None":
                fund = self.funds[f]
                fund_path = cp + "funds/" + fund.name
                if not os.access(fund_path + "/", os.F_OK):
                    os.mkdir(fund_path)
                stats = StatisticsCalculations(self.bsdm, f, fund.first_investment_date, as_of_date)
                # Label -> bound method computing that statistic; keys match
                # the original cache layout exactly.
                measures = [
                    ("Compound Return", stats.CompoundReturn),
                    ("Annualized Geometric Return", stats.AnnualizedGeometricReturn),
                    ("Annualized Arithmetic Standard Deviation", stats.AnnualizedArithmeticStandardDeviation),
                    ("Beta", stats.Beta),
                    ("Alpha", stats.AnnualizedAlpha),
                    ("Correlation Coefficient", stats.CorrelationCoefficient),
                    ("RSquared", stats.RSquared),
                    ("Max Drawdown", stats.MaxDrawDown),
                    ("Sharpe Ratio", stats.SharpeRatio),
                    ("Annualized Up-Capture", stats.AnnualizedUpCapture),
                    ("Annualized Down-Capture", stats.AnnualizedDownCapture),
                ]
                stats_data = {}
                for label, calc in measures:
                    # Each statistic is computed independently; failures are
                    # recorded, not propagated.
                    try:
                        stats_data[label] = [calc()]
                    except Exception:
                        stats_data[label] = ["N/A"]

                rep_file = open(fund_path + "/stats_data.cache", 'w')
                cPickle.dump(stats_data, rep_file)
                rep_file.close()
        self.loaded[10] = True
class StatsCacheUpdater:
    """Builds and refreshes the on-disk statistics cache.

    The cache lives under *cache_path* and holds three sub-folders:
    returns/ (pickled month-by-month return index), lists/ (pickled fund
    lists) and pg_lists/ (peer-group id maps).  Data is pulled from Backstop
    through a BackStopDataManager instance.

    Fixes over the original: ex_invested_funds.append was missing its call
    parentheses (focus-list funds were silently dropped); dateFormat's
    "DEFAULT" branch referenced a bare ``months`` name (NameError); several
    defaults (date.today()/datetime.now()/the os.getenv-based cache path)
    were evaluated once at def time instead of per call; leaked report-file
    handles are now closed.
    """

    # (threshold, state) pairs scanned top-down; the first entry whose
    # threshold is <= the 3-digit zip prefix wins.  The 885xx Texas enclave
    # inside the New Mexico range is an exact match handled in getState().
    _ZIP_STATE_TABLE = [
        (995, "Alaska"), (980, "Washington"), (970, "Oregon"),
        (967, "Hawaii"), (962, "None"), (900, "California"),
        (889, "Nevada"), (870, "New Mexico"), (850, "Arizona"),
        (840, "Utah"), (832, "Idaho"), (820, "Wyoming"),
        (800, "Colorado"), (750, "Texas"), (730, "Oklahoma"),
        (716, "Arkansas"), (700, "Lousiana"),  # (sic) original spelling kept
        (680, "Nebraska"), (660, "Kansas"), (630, "Missouri"),
        (600, "Illinois"), (590, "Montana"), (580, "North Dakota"),
        (570, "South Dakota"), (569, "Washington D.C."),
        (550, "Minnesota"), (530, "Wisconsin"), (500, "Iowa"),
        (480, "Michigan"), (460, "Indiana"), (430, "Ohio"),
        (400, "Kentucky"), (398, "Georgia"), (386, "Mississippi"),
        (370, "Tennessee"), (350, "Alabama"), (320, "Florida"),
        (300, "Georgia"), (290, "South Carolina"), (270, "North Carolina"),
        (247, "West Virginia"), (220, "Virginia"), (206, "Maryland"),
        (202, "Fed. Government"), (201, "Virginia"),
        (200, "Washington D.C."), (197, "Delaware"),
        (150, "Pennsylvania"), (100, "New York"), (90, "None"),
        (70, "New Jersey"), (60, "Connecticut"), (50, "Vermont"),
        (39, "Maine"), (30, "New Hampshire"), (28, "Rhode Island"),
        (10, "Massachusetts"), (6, "Puerto Rico"),
    ]

    def __init__(self, cache_path=None):
        """cache_path: root folder of the cache; defaults to the current
        Windows user's cache_root/Stats folder.  The default is built at
        call time — the original evaluated os.getenv("username") in the
        def-time default, which crashes at class-definition time when that
        environment variable is unset."""
        if cache_path is None:
            cache_path = ("C:/Documents and Settings/" +
                          os.getenv("username") + "/cache_root/Stats")
        # Normalize to exactly one trailing separator.
        if cache_path[-1] != "/" and cache_path[-1] != "\\":
            cache_path += "/"
        self.cache_path = cache_path
        t = date.today()
        # Reports are always run as of the 15th of the current month.
        self.cur_date = date(t.year, t.month, 15)

        self.get = BackStopDataManager()
        # Rotated by one so months[m % 12] works: 12 % 12 == 0 -> "Dec".
        self.months = [
            "Dec", "Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug",
            "Sep", "Oct", "Nov"
        ]
        self.checkFolders()
        self.loadData()

    def syncCache(self, source_path):
        """Mirror *source_path* into the cache root using Windows xcopy."""
        cmd_str = "xcopy \"" + source_path + "\" \"" + self.cache_path[
            0:len(self.cache_path) - 1] + "\" /e /i /h"
        os.system(cmd_str)

    def checkFolders(self):
        """Create the returns/, lists/ and pg_lists/ cache folders if absent."""
        cp = self.cache_path
        for sub in ("returns", "lists", "pg_lists"):
            if not os.access(cp + sub, os.F_OK):
                os.makedirs(cp + sub)

    def loadData(self):
        """Load the pickled returns index from disk (if present) and derive
        the per-fund return-date lists from it."""
        self.returns_index = {}
        self.fund_return_dates = {}
        if os.access(self.cache_path + "returns/returns.cache", os.F_OK):
            ret_cache_file = open(self.cache_path + "returns/returns.cache")
            self.returns_index = cPickle.load(ret_cache_file)
            ret_cache_file.close()
            self.loadFundReturnDates()

    def loadFundReturnDates(self):
        """Rebuild fund_return_dates (fund id -> list of month-start dates)
        from returns_index (year -> month -> fund id -> return data).  Only
        months carrying a final ("RMF") return count as reported."""
        for year in self.returns_index.keys():
            for month in self.returns_index[year].keys():
                month_block = self.returns_index[year][month]
                for fund_id in month_block.keys():
                    if fund_id not in self.fund_return_dates:
                        self.fund_return_dates[fund_id] = []
                    if "RMF" in month_block[fund_id]:
                        month_start = date(year, month, 1)
                        if month_start not in self.fund_return_dates[fund_id]:
                            self.fund_return_dates[fund_id].append(month_start)

    def updateReturns(self, as_of_date=None):
        """Fetch the as-of month's returns for the benchmarks and every
        active fund and merge them into the pickled returns index.

        as_of_date defaults to today (evaluated at call time — the original
        def-time default went stale in long-running processes) and is
        clamped so it never lies in the future.
        """
        if as_of_date is None or self.equalOrAfter(as_of_date, date.today()):
            as_of_date = date.today()

        if not os.access(self.cache_path + "lists/active_funds.cache",
                         os.F_OK):
            self.updateLists()

        tmp_file = open(self.cache_path + "lists/active_funds.cache")
        active_funds = cPickle.load(tmp_file)
        tmp_file.close()

        benchmarks = [496539, 496605]  # benchmark Backstop ids

        returns = {}

        for b_id in benchmarks:
            tmp_returns = self.get.getHedgeFundReturns(b_id, as_of_date,
                                                       as_of_date)
            if tmp_returns != False:
                returns[b_id] = {}
                for month in tmp_returns:
                    if str(month[5][1]) != "not_existent":
                        returns[b_id]["RMF"] = month[0][1] + 1
                        returns[b_id]["RF"] = month[0][1]

        for fund_var in range(0, len(active_funds)):
            if fund_var % 25 == 0:  # lightweight progress indicator
                print(str(100.0 * float(fund_var) / float(len(active_funds))) + " %")
            fund = active_funds[fund_var]
            tmp_returns = self.get.getHedgeFundReturns(fund.ID, as_of_date,
                                                       as_of_date)
            if tmp_returns != False:
                returns[fund.ID] = {}
                for month in tmp_returns:
                    if str(month[5][1]) != "not_existent":
                        returns[fund.ID]["RMF"] = month[0][1] + 1
                        returns[fund.ID]["RF"] = month[0][1]

        if as_of_date.year not in self.returns_index:
            self.returns_index[as_of_date.year] = {}

        self.returns_index[as_of_date.year][as_of_date.month] = returns

        self.loadFundReturnDates()

        ret_file = open(self.cache_path + "returns/returns.cache", 'w')
        cPickle.dump(self.returns_index, ret_file)
        ret_file.close()

    def updatePGList(self):
        """Cache the '{Stats}' peer-group id->name map and each group's
        member-id list."""
        id_name_map = dict()
        for pg in self.get.getPeerGroupIds():
            if "{Stats}" in pg[1]:
                id_name_map[pg[0]] = pg[1]

        out_file = open(
            self.cache_path + "pg_lists/cache-peer_group_id_name_map.cache",
            "w")
        cPickle.dump(id_name_map, out_file)
        out_file.close()

        pg_member_ids = {}
        for pg_id in id_name_map.keys():
            info = self.get.getPeerGroupMemberIds(pg_id, date.today())
            pg_member_ids[pg_id] = [i[0] for i in info]

        out_file = open(
            self.cache_path + "pg_lists/cache-peer_group_member_id.cache", "w")
        cPickle.dump(pg_member_ids, out_file)
        out_file.close()

    def updateMCPFAIList(self):
        """Run the MCP Funds-and-Indices report and cache the resulting
        Fund list to lists/MCPFAI_funds.cache."""
        MCPFAI = []

        file_def = open("def/MCPFAI_def.dat")
        file_res = open("def/MCPFAI_res.dat")
        report_def = file_def.read()
        report_res = file_res.read()
        file_def.close()  # the original leaked both handles
        file_res.close()
        info = self.get.runFundsReport(report_def, report_res, date.today())

        for data in info:
            fund = Fund()
            fund.Name = data[0][1]
            fund.ID = data[1][1]
            fund.DMRR = datetime.strptime(data[2][1], "%m/%d/%Y")
            fund.incept_date = datetime.strptime(data[3][1], "%m/%d/%Y")
            fund.IL = "MCP Fund or Index"
            fund.Class = "MCP Fund or Index"
            fund.DMRR = self.getActualDMRR(fund.ID, fund.DMRR)
            MCPFAI.append(fund)

        tmp_file = open(self.cache_path + "lists/MCPFAI_funds.cache", 'w')
        cPickle.dump(MCPFAI, tmp_file)
        tmp_file.close()

    def updateLists(self):
        """Regenerate every cached list: peer groups, MCP funds/indices and
        the fund universe lists."""
        self.updatePGList()
        self.updateMCPFAIList()
        self.updateFundList()

    def updateFundList(self):
        """Run the all-funds report and regenerate every cached fund list
        (all/active/invested/focus and their complements) plus the
        id -> Fund index."""
        file_def = open("def/all_funds_def.dat")
        report_def = file_def.read()
        file_def.close()  # the original leaked this handle
        report_res = "${true}"

        info = self.get.runFundsReport(report_def, report_res, self.cur_date)

        funds = []
        active_funds = []
        fund_index = {}

        invested_funds = []
        focus_list_funds = []
        inv_and_focus_funds = []
        ex_focus_funds = []
        ex_invested_funds = []
        ex_inv_and_focus_funds = []

        for data in info:
            if data[2][1] != 'None':  # skip funds with no most-recent return
                fund = Fund()
                fund.Name = data[0][1]
                fund.ID = data[1][1]
                fund.DMRR = datetime.strptime(data[2][1], "%m/%d/%Y")
                fund.DMRR = self.getActualDMRR(fund.ID, fund.DMRR)
                fund.IL = data[4][1]
                fund.StrengthRating = data[5][1]
                fund.Class = data[6][1]
                fund.prime_strat = data[7][1]
                fund.sub_strat1 = data[8][1]
                fund.sub_strat2 = data[9][1]
                fund.city = data[10][1]
                # Prefer the state derived from the zip code; fall back to
                # the report's state column when the prefix is unknown.
                tmp_state = self.getState(data[12][1])
                if tmp_state == "None":
                    fund.state = data[11][1]
                else:
                    fund.state = tmp_state
                fund.postal_code = data[12][1]
                fund.incept_date = datetime.strptime(data[13][1], "%m/%d/%Y")
                fund.part_of_pmg = str.lower(str(data[14][1])).strip() == "yes"
                funds.append(fund)

        for fund in funds:
            # The long space run pads Name so the annotations line up in UI
            # list widgets; kept byte-identical to the original.
            fund.ListName = fund.Name + "                                                                                                              (MRR:" + self.dateFormat(
                fund.DMRR, "LIST") + ")"
            fund.ListName += "(Int:" + fund.IL + ")"
            fund.ListName += "(Str:" + fund.StrengthRating + ")"
            fund.ListName += "(Cls:" + fund.Class + ")"
            fund.ListName += "(Strat:" + fund.prime_strat + ")"
            fund.ListName += "(Sub1:" + fund.sub_strat1 + ")"
            fund.ListName += "(Sub2:" + fund.sub_strat2 + ")"
            fund.ListName += "(City:" + fund.city + ")"
            fund.ListName += "(State:" + fund.state + ")"
            fund.ListName += "(Zip:" + fund.postal_code + ")"
            fund_index[fund.ID] = fund
            # Active = reported within the last 12 months.
            if self.equalOrAfter(
                    fund.DMRR,
                    datetime(self.cur_date.year - 1, self.cur_date.month,
                             self.cur_date.day)):
                active_funds.append(fund)
            if fund.IL == "Invested" and fund.part_of_pmg:
                inv_and_focus_funds.append(fund)
                invested_funds.append(fund)
                ex_focus_funds.append(fund)
            elif fund.IL == "Focus List" and fund.part_of_pmg:
                inv_and_focus_funds.append(fund)
                focus_list_funds.append(fund)
                # BUG FIX: original said "ex_invested_funds.append" with no
                # call, so focus-list funds never made the ex-invested list.
                ex_invested_funds.append(fund)
            else:
                ex_invested_funds.append(fund)
                ex_focus_funds.append(fund)
                ex_inv_and_focus_funds.append(fund)

        by_name = lambda f: f.Name
        invested_funds.sort(key=by_name)
        focus_list_funds.sort(key=by_name)
        inv_and_focus_funds.sort(key=by_name)
        ex_focus_funds.sort(key=by_name)
        ex_invested_funds.sort(key=by_name)
        ex_inv_and_focus_funds.sort(key=by_name)
        active_funds.sort(key=by_name)
        funds.sort(key=by_name)

        # (file name, payload) pairs; one pickle per list.
        lists_to_write = [
            ("all_funds.cache", funds),
            ("active_funds.cache", active_funds),
            ("invested_funds.cache", invested_funds),
            ("focus_list_funds.cache", focus_list_funds),
            ("inv_and_focus_funds.cache", inv_and_focus_funds),
            ("ex-focus.cache", ex_focus_funds),
            ("ex-invested.cache", ex_invested_funds),
            ("ex-invested_and_focus.cache", ex_inv_and_focus_funds),
            ("all_fund_index.cache", fund_index),
        ]
        for cache_name, payload in lists_to_write:
            out_file = open(self.cache_path + "lists/" + cache_name, "w")
            cPickle.dump(payload, out_file)
            out_file.close()

    def getActualDMRR(self, id, dmrr):
        """Return the later of *dmrr* and the newest cached return month for
        the fund.  The cache may key ids as int, str or unicode, so each
        representation is tried in turn."""
        tmp_dmrr = dmrr

        if id in self.fund_return_dates:
            dates = self.fund_return_dates[id]
        elif str(id) in self.fund_return_dates:
            dates = self.fund_return_dates[str(id)]
        elif int(str(id)) in self.fund_return_dates:
            dates = self.fund_return_dates[int(str(id))]
        elif unicode(str(id)) in self.fund_return_dates:
            dates = self.fund_return_dates[unicode(str(id))]
        else:
            dates = []
        print("dates =  " + str(dates))
        if len(dates) > 0:
            dates = sorted(dates, reverse=True)
            if self.equalOrAfter(dates[0], dmrr):
                tmp_dmrr = dates[0]
        return tmp_dmrr

    def equalOrAfter(self, question, bench):
        """True when *question*'s (year, month) is >= *bench*'s; days are
        deliberately ignored — dates here are month granularity."""
        return (question.year, question.month) >= (bench.year, bench.month)

    def prevMonthYear(self, offset, cur_date=None):
        """Year, *offset* years back, of the month following *cur_date*
        (December rolls into the next year).  Default is now() evaluated at
        call time — the original def-time default went stale."""
        if cur_date is None:
            cur_date = datetime.now()
        year = cur_date.year + 1 if cur_date.month == 12 else cur_date.year
        return year - offset

    def prevMonth(self, cur_date=None):
        """Month value two below *cur_date*'s month, wrapping January to 11.
        NOTE(review): looks like a 0-based previous-month index — confirm
        against callers before changing."""
        if cur_date is None:
            cur_date = datetime.now()
        if cur_date.month == 1:
            return 11
        return cur_date.month - 2

    def getEndOfMonth(self, month, year):
        """Number of days in the given month of the given year."""
        if month == 2:
            return 29 if calendar.isleap(year) else 28
        if month in (4, 6, 9, 11):
            return 30
        return 31

    def dateFormat(self, cur_date, type="DEFAULT"):
        """Format *cur_date* as Mon'YY ("LIST") or Mon-YY ("DEFAULT")."""
        formatted_str = ""
        if type == "LIST":
            formatted_str = self.months[cur_date.month % 12] + "'" + str(
                cur_date.year)[2:]
        elif type == "DEFAULT":
            year = cur_date.year - 2000
            if year < 10:
                year = "0" + str(year)
            else:
                year = str(year)
            # BUG FIX: the original referenced a bare ``months`` name here
            # (NameError); the month table lives on the instance.
            formatted_str = self.months[cur_date.month % 12] + "-" + year
        return formatted_str

    def getState(self, zip_code):
        """Map a US zip code (or anything str()-able) to its state name via
        the first 3 digits; returns the string "None" when unknown."""
        prefix = str(zip_code)[0:3]
        try:
            prefix = int(prefix)
        except ValueError:
            prefix = -1  # non-numeric prefixes fall through to "None"
        if prefix == 885:  # Texas enclave inside the New Mexico range
            return "Texas"
        for threshold, state in self._ZIP_STATE_TABLE:
            if prefix >= threshold:
                return state
        return "None"
class StatsCacheUpdater:    
    def __init__(self,cache_path=("C:/Documents and Settings/" + os.getenv("username") + "/cache_root/Stats")):
        """Set up the stats cache: normalize the cache root, create the
        folder layout and load any previously pickled returns data."""
        # Guarantee the cache root ends in exactly one path separator.
        if cache_path[-1] not in ("/", "\\"):
            cache_path += "/"
        self.cache_path = cache_path
        today = date.today()
        # Work against the 15th of the current month.
        self.cur_date = date(today.year, today.month, 15)

        self.get = BackStopDataManager()
        # "Dec" sits at index 0 so (month % 12) maps December to slot 0.
        self.months = ["Dec", "Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep","Oct", "Nov"]
        self.checkFolders()
        self.loadData()
    
    def syncCache(self,source_path):
        """Mirror source_path into the cache root with Windows xcopy
        (/e = subdirs incl. empty, /i = assume directory, /h = hidden files)."""
        destination = self.cache_path[:-1]  # xcopy wants no trailing slash
        cmd_str = "xcopy \"" + source_path + "\" \"" + destination + "\" /e /i /h"
        os.system(cmd_str)
    
    def checkFolders(self):
        cp = self.cache_path
        if not os.access(cp + "returns", os.F_OK):
            os.makedirs(cp + "returns")
        if not os.access(cp + "lists", os.F_OK):
            os.makedirs(cp + "lists")
        if not os.access(cp + "pg_lists", os.F_OK):
            os.makedirs(cp + "pg_lists")
    
    def loadData(self):
        self.returns_index = {}
        self.fund_return_dates = {}
        if os.access(self.cache_path + "returns/returns.cache",os.F_OK):
            ret_cache_file = open(self.cache_path + "returns/returns.cache")
            self.returns_index = cPickle.load(ret_cache_file)
            ret_cache_file.close()
            self.loadFundReturnDates()

    def loadFundReturnDates(self):
        for year in self.returns_index.keys():
            for month in self.returns_index[year].keys():
                for id in self.returns_index[year][month].keys():
                    if id not in self.fund_return_dates.keys():
                        self.fund_return_dates[id] = []
                    if "RMF" in self.returns_index[year][month][id].keys():
                        if date(year,month,1) not in self.fund_return_dates[id]:
                            self.fund_return_dates[id].append(date(year,month,1))
        
    def updateReturns(self,as_of_date=None):
        """Fetch the given month's returns for the benchmarks and every
        active fund and merge them into the on-disk returns cache.

        as_of_date -- month to refresh; defaults to today. Fix: the old
        default ``date.today()`` was evaluated once at import time, so a
        long-running process silently refreshed a stale month. Dates at or
        past the current month are clamped to today.
        """
        if as_of_date is None:
            as_of_date = date.today()
        # Never query beyond the current month.
        if self.equalOrAfter(as_of_date,date.today()):
            as_of_date = date.today()

        # The active-fund list is produced by updateLists(); build on demand.
        if not os.access(self.cache_path + "lists/active_funds.cache",os.F_OK):
            self.updateLists()

        tmp_file = open(self.cache_path + "lists/active_funds.cache")
        active_funds = cPickle.load(tmp_file)
        tmp_file.close()

        # Backstop ids of the two benchmark indices tracked alongside funds.
        benchmarks = [496539,496605]

        returns = {}

        for b_id in benchmarks:
            self._recordMonthReturn(b_id, as_of_date, returns)

        for fund_var in range(0,len(active_funds)):
            if fund_var % 25 == 0:
                # Progress indicator; output format matches the original.
                print("%s %%" % (100.0 * float(fund_var)/float(len(active_funds))))
            self._recordMonthReturn(active_funds[fund_var].ID, as_of_date, returns)

        if not as_of_date.year in self.returns_index.keys():
            self.returns_index[as_of_date.year] = {}

        self.returns_index[as_of_date.year][as_of_date.month] = returns

        # Refresh the derived per-fund date lookup before persisting.
        self.loadFundReturnDates()

        ret_file = open(self.cache_path + "returns/returns.cache",'w')
        cPickle.dump(self.returns_index, ret_file)
        ret_file.close()

    def _recordMonthReturn(self, entity_id, as_of_date, returns):
        # Fetch one entity's returns for the month and record the "RMF"/"RF"
        # values when the month exists; a False result from the data manager
        # means "no data" and leaves `returns` untouched.
        tmp_returns = self.get.getHedgeFundReturns(entity_id, as_of_date, as_of_date)
        if tmp_returns != False:
            returns[entity_id] = {}
            for month in tmp_returns:
                if str(month[5][1]) != "not_existent":
                    returns[entity_id]["RMF"] = month[0][1] + 1
                    returns[entity_id]["RF"] = month[0][1]
    
    def updatePGList(self):
        """Refresh the cached peer-group id->name map and member-id lists.

        Only peer groups whose name carries the "{Stats}" tag are kept.
        """
        id_name_map = dict()
        for row in self.get.getPeerGroupIds():
            if "{Stats}" in row[1]:
                id_name_map[row[0]] = row[1]

        out = open(self.cache_path + "pg_lists/cache-peer_group_id_name_map.cache", "w")
        cPickle.dump(id_name_map, out)
        out.close()

        pg_member_ids = {}
        for pg_id in id_name_map.keys():
            members = self.get.getPeerGroupMemberIds(pg_id, date.today())
            pg_member_ids[pg_id] = [member[0] for member in members]

        out = open(self.cache_path + "pg_lists/cache-peer_group_member_id.cache", "w")
        cPickle.dump(pg_member_ids, out)
        out.close()
    
    def updateMCPFAIList(self):
        """Rebuild the cached "MCP Fund or Index" fund list from its report.

        Fix: the two report-definition file handles are now closed after
        reading (the old code leaked both).
        """
        MCPFAI = []

        file_def = open("def/MCPFAI_def.dat")
        report_def = file_def.read()
        file_def.close()
        file_res = open("def/MCPFAI_res.dat")
        report_res = file_res.read()
        file_res.close()
        info = self.get.runFundsReport(report_def, report_res, date.today())

        for data in info:
            fund = Fund()
            fund.Name = data[0][1]
            fund.ID = data[1][1]
            fund.DMRR = datetime.strptime(data[2][1],"%m/%d/%Y")
            fund.incept_date = datetime.strptime(data[3][1],"%m/%d/%Y")
            fund.IL = "MCP Fund or Index"
            fund.Class = "MCP Fund or Index"
            # Prefer the newest month present in the returns cache over the
            # report's stated most-recent-return date.
            fund.DMRR = self.getActualDMRR(fund.ID, fund.DMRR)
            MCPFAI.append(fund)

        tmp_file = open(self.cache_path + "lists/MCPFAI_funds.cache",'w')
        cPickle.dump(MCPFAI, tmp_file)
        tmp_file.close()
        
    def updateLists(self):
        self.updatePGList()
        self.updateMCPFAIList()
        self.updateFundList()
    
    def updateFundList(self):        
        """Run the all-funds report and rebuild every pickled fund-list cache.

        Builds the master fund list, the derived lists (active, invested,
        focus, their combinations and complements) and an ID -> Fund index,
        then pickles each file under ``lists/``.

        Fixes: focus-list funds are now actually appended to the
        ex-invested list (the old code had ``ex_invested_funds.append``
        without the call parentheses, a no-op), and the report-definition
        file handle is closed after reading.
        """
        file_def = open("def/all_funds_def.dat")
        report_def = file_def.read()
        file_def.close()
        report_res = "${true}"  # no restriction: the report covers every fund

        info = self.get.runFundsReport(report_def, report_res, self.cur_date)

        funds = []
        active_funds = []
        fund_index = {}

        invested_funds = []
        focus_list_funds = []
        inv_and_focus_funds = []
        ex_focus_funds = []
        ex_invested_funds = []
        ex_inv_and_focus_funds = []

        for data in info:
            # Skip rows with no most-recent-return date at all.
            if data[2][1] != 'None':
                fund = Fund()
                fund.Name = data[0][1]
                fund.ID = data[1][1]
                fund.DMRR = datetime.strptime(data[2][1],"%m/%d/%Y")
                # Prefer the newest cached return month over the reported DMRR.
                fund.DMRR = self.getActualDMRR(fund.ID, fund.DMRR)
                fund.IL = data[4][1]
                fund.StrengthRating = data[5][1]
                fund.Class = data[6][1]
                fund.prime_strat = data[7][1]
                fund.sub_strat1 = data[8][1]
                fund.sub_strat2 = data[9][1]
                fund.city = data[10][1]
                # Derive the state from the ZIP prefix; fall back to the
                # report's own state column when the ZIP is unrecognized.
                tmp_state = self.getState(data[12][1])
                if tmp_state == "None":
                    fund.state = data[11][1]
                else:
                    fund.state = tmp_state
                fund.postal_code = data[12][1]
                fund.incept_date = datetime.strptime(data[13][1],"%m/%d/%Y")
                fund.part_of_pmg = str.lower(str(data[14][1])).strip() == "yes"
                funds.append(fund)

        for fund in funds:
            # Padded display name carrying the key attributes inline.
            fund.ListName = fund.Name + "                                                                                                              (MRR:" + self.dateFormat(fund.DMRR,"LIST") + ")"
            fund.ListName += "(Int:" + fund.IL + ")"
            fund.ListName += "(Str:" + fund.StrengthRating + ")"
            fund.ListName += "(Cls:" + fund.Class + ")"
            fund.ListName += "(Strat:" + fund.prime_strat + ")"
            fund.ListName += "(Sub1:" + fund.sub_strat1 + ")"
            fund.ListName += "(Sub2:" + fund.sub_strat2 + ")"
            fund.ListName += "(City:" + fund.city +")"
            fund.ListName += "(State:" + fund.state + ")"
            fund.ListName += "(Zip:" + fund.postal_code + ")"
            fund_index[fund.ID] = fund
            # Active = reported within the last twelve months.
            if self.equalOrAfter(fund.DMRR,datetime(self.cur_date.year-1,self.cur_date.month,self.cur_date.day)):
                active_funds.append(fund)
            if fund.IL == "Invested" and fund.part_of_pmg:
                inv_and_focus_funds.append(fund)
                invested_funds.append(fund)
                ex_focus_funds.append(fund)
            elif fund.IL == "Focus List" and fund.part_of_pmg:
                inv_and_focus_funds.append(fund)
                focus_list_funds.append(fund)
                ex_invested_funds.append(fund)  # bug fix: call was missing its ()
            else:
                ex_invested_funds.append(fund)
                ex_focus_funds.append(fund)
                ex_inv_and_focus_funds.append(fund)

        by_name = lambda f: f.Name
        invested_funds = sorted(invested_funds, key=by_name)
        focus_list_funds = sorted(focus_list_funds, key=by_name)
        inv_and_focus_funds = sorted(inv_and_focus_funds, key=by_name)
        ex_focus_funds = sorted(ex_focus_funds, key=by_name)
        ex_invested_funds = sorted(ex_invested_funds, key=by_name)
        ex_inv_and_focus_funds = sorted(ex_inv_and_focus_funds, key=by_name)
        active_funds = sorted(active_funds, key=by_name)
        funds = sorted(funds, key=by_name)

        self._dumpListCache("all_funds.cache", funds)
        self._dumpListCache("active_funds.cache", active_funds)
        self._dumpListCache("invested_funds.cache", invested_funds)
        self._dumpListCache("focus_list_funds.cache", focus_list_funds)
        self._dumpListCache("inv_and_focus_funds.cache", inv_and_focus_funds)
        self._dumpListCache("ex-focus.cache", ex_focus_funds)
        self._dumpListCache("ex-invested.cache", ex_invested_funds)
        self._dumpListCache("ex-invested_and_focus.cache", ex_inv_and_focus_funds)
        self._dumpListCache("all_fund_index.cache", fund_index)

    def _dumpListCache(self, file_name, payload):
        # Pickle one object to lists/<file_name> under the cache root.
        out = open(self.cache_path + "lists/" + file_name, "w")
        cPickle.dump(payload, out)
        out.close()
    
    def getActualDMRR(self,id,dmrr):
        tmp_dmrr = dmrr
        
        if id in self.fund_return_dates.keys():
            dates = self.fund_return_dates[id]
        elif str(id) in self.fund_return_dates.keys():
            dates = self.fund_return_dates[str(id)]
        elif int(str(id)) in self.fund_return_dates.keys():
            dates = self.fund_return_dates[int(str(id))]
        elif unicode(str(id)) in self.fund_return_dates.keys():
            dates = self.fund_return_dates[unicode(str(id))]
        else:
            dates = []
        print "dates = ", dates
        if len(dates) > 0:
            dates = sorted(dates,reverse=True)
            if self.equalOrAfter(dates[0],dmrr):
                tmp_dmrr = dates[0]
        return tmp_dmrr
        
    def equalOrAfter(self,question, bench):
        """Return True when question's (year, month) equals or follows
        bench's; the day of month is deliberately ignored."""
        return (question.year, question.month) >= (bench.year, bench.month)
    
    def prevMonthYear(self,offset,cur_date=None):
        """Return cur_date's year shifted back by ``offset``.

        Fix: the default date is now evaluated per call; the old
        ``cur_date=datetime.now()`` default was frozen at import time.
        NOTE(review): December maps to ``year + 1`` before subtracting —
        preserved as-is, presumably pairing with prevMonth's wrap; confirm.
        """
        if cur_date is None:
            cur_date = datetime.now()
        if cur_date.month==12:
            year = cur_date.year + 1
        else:
            year = cur_date.year
        return year - offset
    
    def prevMonth(self,cur_date=None):
        """Return 11 for January, otherwise ``cur_date.month - 2``.

        Fix: the default date is now evaluated per call; the old
        ``cur_date=datetime.now()`` default was frozen at import time.
        NOTE(review): the -2 offset looks like an index into the Dec-first
        ``self.months`` list, but appears shifted by one — verify intent
        before changing.
        """
        if cur_date is None:
            cur_date = datetime.now()
        if cur_date.month==1:
            return 11
        return cur_date.month - 2
    
    def getEndOfMonth(self,month,year):
        """Return the number of days in the given month (leap-year aware);
        any unrecognized month value falls through to 31, as before."""
        if month == 2:
            return 29 if calendar.isleap(year) else 28
        if month in (4, 6, 9, 11):
            return 30
        return 31

    def dateFormat(self,cur_date,type="DEFAULT"):
        formatted_str = ""
        if type == "LIST":
            formatted_str = self.months[cur_date.month%12] + "'" + str(cur_date.year)[2:]
        elif type == "DEFAULT":
            year = cur_date.year - 2000
            if year < 10:
                year = "0" + str(year)
            else:
                year = str(year)
            formatted_str = months[cur_date.month%12] + "-" + year
        return formatted_str    

    def getState(self,zip_code):
        """Map a US ZIP code to a state name via its 3-digit prefix.

        Returns "None" when the prefix is non-numeric or unassigned.
        Fixes: "Lousiana" typo corrected to "Louisiana"; the bare
        ``except:`` narrowed to ValueError; the if/elif ladder replaced by
        an ordered (floor, state) table with identical semantics.
        """
        try:
            prefix = int(str(zip_code)[0:3])
        except ValueError:
            prefix = -1
        # 885xx is a Texas pocket sitting inside the New Mexico range.
        if prefix == 885:
            return "Texas"
        # Thresholds in descending order; first floor <= prefix wins,
        # exactly mirroring the original elif ladder.
        for floor, state in (
            (995, "Alaska"),
            (980, "Washington"),
            (970, "Oregon"),
            (967, "Hawaii"),
            (962, "None"),
            (900, "California"),
            (889, "Nevada"),
            (870, "New Mexico"),
            (850, "Arizona"),
            (840, "Utah"),
            (832, "Idaho"),
            (820, "Wyoming"),
            (800, "Colorado"),
            (750, "Texas"),
            (730, "Oklahoma"),
            (716, "Arkansas"),
            (700, "Louisiana"),
            (680, "Nebraska"),
            (660, "Kansas"),
            (630, "Missouri"),
            (600, "Illinois"),
            (590, "Montana"),
            (580, "North Dakota"),
            (570, "South Dakota"),
            (569, "Washington D.C."),
            (550, "Minnesota"),
            (530, "Wisconsin"),
            (500, "Iowa"),
            (480, "Michigan"),
            (460, "Indiana"),
            (430, "Ohio"),
            (400, "Kentucky"),
            (398, "Georgia"),
            (386, "Mississippi"),
            (370, "Tennessee"),
            (350, "Alabama"),
            (320, "Florida"),
            (300, "Georgia"),
            (290, "South Carolina"),
            (270, "North Carolina"),
            (247, "West Virginia"),
            (220, "Virginia"),
            (206, "Maryland"),
            (202, "Fed. Government"),
            (201, "Virginia"),
            (200, "Washington D.C."),
            (197, "Delaware"),
            (150, "Pennsylvania"),
            (100, "New York"),
            (90, "None"),
            (70, "New Jersey"),
            (60, "Connecticut"),
            (50, "Vermont"),
            (39, "Maine"),
            (30, "New Hampshire"),
            (28, "Rhode Island"),
            (10, "Massachusetts"),
            (6, "Puerto Rico"),
        ):
            if prefix >= floor:
                return state
        return "None"