Beispiel #1
0
 def refreshFundInfo(self, fund_universe, add_managers=None, include_indices=False):
     """Refresh DMRR / IL / Signal fields on funds in self.fund_index.

     fund_universe: 1, 2 or 3 selects a canned restriction file
         (investment, focus, investment+focus); the literal string
         "PeerGroup" skips the universe refresh entirely.
     add_managers: optional list of extra manager ids to refresh.
         (Was a mutable default ``[]``, which is shared between calls;
         now uses the None sentinel.)
     include_indices: when true, also refreshes the MCP fund/index
         list and rebuilds self.MCPFAI_funds.
     """
     if add_managers is None:
         add_managers = []

     t = date.today()
     # Mid-month snapshot date used for all report runs.
     cur_date = date(t.year, t.month, 15)
     get = BackStopDataManager()
     stats_cache_updater = StatsCacheUpdater(self.cache_path)

     # Read the shared report definition, closing the handle (the old
     # code opened/closed manually and leaked on error).
     with open("def/refresh_fund_def.dat") as file_def:
         report_def = file_def.read()

     if fund_universe != "PeerGroup":
         res_files = {
             1: "def/refresh_inv_res.dat",
             2: "def/refresh_focus_res.dat",
             3: "def/refresh_inv_focus_res.dat",
         }
         try:
             res_path = res_files[fund_universe]
         except KeyError:
             # The old code fell through and raised a confusing
             # NameError on report_res; fail fast with a clear message.
             raise ValueError("unknown fund_universe: %r" % (fund_universe,))
         with open(res_path) as file_res:
             report_res = file_res.read()
         info = get.runFundsReport(report_def, report_res, cur_date)
         self._applyRefreshedFundInfo(info, stats_cache_updater)

     if len(add_managers) > 0:
         # Build a restriction matching ANY of the requested ids.
         # BUG FIX: the original joined clauses with '&&', which can
         # never hold for two distinct ids of the same field; '||' is
         # the intended semantics for a list of managers.
         clauses = ["(report.field1 == " + str(m) + ")" for m in add_managers]
         report_res = "${" + " || ".join(clauses) + "}"
         info = get.runFundsReport(report_def, report_res, cur_date)
         self._applyRefreshedFundInfo(info, stats_cache_updater)

     if include_indices:
         self.MCPFAI_funds = []
         with open("def/MCPFAI_def.dat") as file_def:
             report_def = file_def.read()
         with open("def/MCPFAI_res.dat") as file_res:
             report_res = file_res.read()
         info = get.runFundsReport(report_def, report_res, cur_date)
         # NOTE: this report's columns are shifted by one relative to
         # the fund reports (id in column 1, DMRR in 2, signal in 4).
         for data in info:
             fund = self.fund_index[data[1][1]]
             fund.DMRR = datetime.strptime(data[2][1], "%m/%d/%Y")
             fund.IL = "MCP Fund or Index"
             fund.Class = "MCP Fund or Index"
             fund.Signal = data[4][1]
             fund.DMRR = stats_cache_updater.getActualDMRR(data[1][1], fund.DMRR)
             self.MCPFAI_funds.append(fund)

 def _applyRefreshedFundInfo(self, info, stats_cache_updater):
     # Apply one report row per fund: row[0]=fund id, row[1]=DMRR date
     # (m/d/Y), row[2]=IL, row[3]=Signal. DMRR is then corrected to the
     # actual most-recent-return date known to the stats cache.
     for data in info:
         fund = self.fund_index[data[0][1]]
         fund.DMRR = datetime.strptime(data[1][1], "%m/%d/%Y")
         fund.IL = data[2][1]
         fund.Signal = data[3][1]
         fund.DMRR = stats_cache_updater.getActualDMRR(data[0][1], fund.DMRR)
    def __init__(self, cache_path, backstop_only=False):
        """Set up the returns store rooted at *cache_path*.

        backstop_only: when true, skip loading cached returns from disk
        and start with an empty in-memory index.
        """
        self.backstop_only = backstop_only
        self.cache_path = cache_path

        # Guarantee a trailing slash so later path concatenation works.
        if self.cache_path[-1] != "/":
            self.cache_path += "/"

        self.bsdm = BackStopDataManager()
        self.returns_index = {}

        if self.backstop_only:
            # Fresh, empty index keyed like the cached structure.
            self.returns_index = {"": {}}
        else:
            self.loadCachedReturns()
Beispiel #3
0
    def __init__(self, cache_path, pg=None):
        """Create the cache updater.

        cache_path: root folder for the on-disk cache.
        pg: optional progress-reporting object (may be None).
        """
        self.bsdm = BackStopDataManager()
        self.pg = pg

        # Make sure the cache root ends with a slash for path building.
        path = cache_path
        if path[-1] != "/":
            path += "/"
        self.cache_path = path

        self.checkFolders()
        # Eleven load-step completion flags, all initially unset.
        self.loaded = [False] * 11

        self.funds = {}
        self.firms = {}
        self.products = {}
    def __init__(self,
                 cache_path=("C:/Documents and Settings/" +
                             os.getenv("username") + "/cache_root/Stats")):
        """Initialise the stats cache helper rooted at *cache_path*.

        NOTE(review): the default path is built once at class-definition
        time and presumes the Windows ``username`` env var is set —
        confirm this module is only imported on Windows.
        """
        # Accept either slash style, but guarantee a trailing separator.
        last = cache_path[len(cache_path) - 1]
        if last != "/" and last != "\\":
            cache_path += "/"
        self.cache_path = cache_path

        # Work as of the 15th of the current month (mid-month snapshot).
        today = date.today()
        self.cur_date = date(today.year, today.month, 15)

        self.get = BackStopDataManager()
        # NOTE(review): index 0 is "Dec" — presumably month lookups are
        # offset by one (month % 12); confirm against callers.
        self.months = [
            "Dec", "Jan", "Feb", "Mar", "Apr", "May", "Jun",
            "Jul", "Aug", "Sep", "Oct", "Nov",
        ]
        self.checkFolders()
        self.loadData()
 def __init__(self,cache_path=("C:/Documents and Settings/" + os.getenv("username") + "/cache_root/Stats")):
     """Initialise the stats cache rooted at *cache_path*.

     NOTE(review): the default path is evaluated once at class-definition
     time and assumes the Windows ``username`` env var is set; if it is
     missing, os.getenv returns None and building the default raises a
     TypeError at import — confirm this runs only on Windows.
     """
     # Accept either slash style but guarantee a trailing separator.
     if cache_path[len(cache_path)-1] != "/" and cache_path[len(cache_path)-1] != "\\":
         cache_path += "/"
     self.cache_path = cache_path
     # Work as of the 15th of the current month (mid-month snapshot date).
     t = date.today()
     self.cur_date = date(t.year,t.month,15)

     self.get = BackStopDataManager()
     # NOTE(review): index 0 is "Dec" — presumably month lookups are
     # offset by one (month % 12); confirm against callers.
     self.months = ["Dec", "Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep","Oct", "Nov"]
     self.checkFolders()
     self.loadData()
 def __init__(self,cache_path,pg=None):
     """Set up the cache updater.

     cache_path: root folder for the on-disk cache.
     pg: optional progress-reporting object (may be None).
     """
     self.bsdm = BackStopDataManager()
     self.cache_path = cache_path
     self.pg = pg

     # Guarantee a trailing slash so string concatenation forms paths.
     if self.cache_path[len(self.cache_path)-1] != "/":
         self.cache_path += "/"

     self.checkFolders()
     # One completion flag per load step; see the load* methods.
     self.loaded = [False,False,False,False,False,False,False,False,False,False,False]

     self.funds = {}
     self.firms = {}
     self.products = {}
Beispiel #7
0
class MISCacheUpdater:
    """Builds and refreshes the on-disk MIS cache from Backstop reports.

    Cache layout under ``cache_path``::

        reports/   pickled raw report output
        firms/     per-firm data (meetings)
        funds/     per-fund data (exposure, AUM, returns, statistics)
        products/  per-product data (transactions, holdings)

    ``self.loaded`` holds one completion flag per load step, in the
    order run by update(); steps that depend on fund/product master
    data lazily reload it from the pickled reports when unset.
    """

    def __init__(self, cache_path, pg=None):
        """cache_path: cache root folder; pg: optional progress reporter."""
        self.bsdm = BackStopDataManager()
        self.cache_path = cache_path
        self.pg = pg

        # Guarantee a trailing slash so later concatenations form paths.
        if self.cache_path[len(self.cache_path) - 1] != "/":
            self.cache_path += "/"

        self.checkFolders()
        # One completion flag per load step, in update() order.
        self.loaded = [False] * 11

        self.funds = {}
        self.firms = {}
        self.products = {}

    def _readReportPair(self, def_path, res_path):
        # Read a report definition/restriction file pair, closing both
        # handles (the original code leaked every one of them).
        with open(def_path) as f_def:
            rep_def = f_def.read()
        with open(res_path) as f_res:
            rep_res = f_res.read()
        return rep_def, rep_res

    def _dumpPickle(self, path, obj):
        # Pickle *obj* to *path*, always closing the file.
        with open(path, 'w') as rep_file:
            cPickle.dump(obj, rep_file)

    def checkFolders(self):
        """Create any missing top-level cache folders."""
        cp = self.cache_path
        for sub in ("reports", "firms", "funds", "products"):
            if not os.access(cp + sub, os.F_OK):
                os.makedirs(cp + sub)

    def clearCache(self):
        """Delete the cache tree and recreate the empty folder skeleton."""
        cp = self.cache_path
        if os.access(cp, os.F_OK):
            shutil.rmtree(cp)
        self.checkFolders()

    def syncCache(self, source_path):
        """Replace the local cache with a copy of *source_path*.

        Windows-only (xcopy via os.system).
        NOTE(review): source_path is pasted into a shell command line;
        only call this with trusted paths.
        """
        s_time = time.clock()
        self.clearCache()
        # Strip the trailing slash: xcopy wants the bare directory name.
        cmd_str = "xcopy \"" + source_path + "\" \"" + self.cache_path[
            0:len(self.cache_path) - 1] + "\" /e /i /h /R /Y"
        os.system(cmd_str)
        print("Copy took %s secs" % (time.clock() - s_time))

    def update(self, as_of_date=None):
        """Rebuild the entire cache as of *as_of_date* (default: today).

        Bug fix: the old default ``date.today()`` was evaluated once at
        class-definition time, so a long-lived process kept rebuilding
        with a stale date; the default is now resolved per call.
        """
        if as_of_date is None:
            as_of_date = date.today()
        self.clearCache()

        self.loadFundData(as_of_date)
        self.loadProductData(as_of_date)
        self.loadContactsData(as_of_date)
        self.loadTransactionData(as_of_date)
        self.loadInternalTransactionData(as_of_date)
        self.loadMeetingsData(as_of_date)
        self.loadExposureData(as_of_date)
        self.loadHoldingsData(as_of_date)
        self.loadAumData(as_of_date)
        self.loadReturnsData(as_of_date)
        self.loadStatisticsData(as_of_date)

    def loadFundData(self, as_of_date, load_from_cache=False):
        """Populate self.funds / self.firms from the funds report.

        load_from_cache: reuse the previously pickled report instead of
        downloading a fresh one from Backstop.
        """
        if self.pg is not None:
            self.pg.startSubProcess(self.pg.cur_sub_proc_total, 2.0)

        cp = self.cache_path
        if not load_from_cache:
            rep_def, rep_res = self._readReportPair("def/fund_def.dat",
                                                    "def/fund_res.dat")

            if self.pg is not None:
                self.pg.addMessage("Downloading Fund Data")
                self.pg.incSub("running funds report.....")

            fund_data = self.bsdm.runFundsReport(rep_def, rep_res,
                                                 as_of_date)

            if self.pg is not None:
                self.pg.incSub("finished funds report.....")

            self._dumpPickle(cp + "reports/fund_rep.cache", fund_data)
        else:
            with open(cp + "reports/fund_rep.cache") as rep_file:
                fund_data = cPickle.load(rep_file)

        for f in fund_data:
            tmp_fund = Fund()
            tmp_fund.backstop_id = int(f[0][1])
            tmp_fund.name = f[1][1]
            # Report columns: 3 = inception, 6 = first investment (m/d/Y).
            tmp_fund.first_investment_date = datetime.strptime(
                f[6][1], "%m/%d/%Y")
            tmp_fund.incept_date = datetime.strptime(f[3][1], "%m/%d/%Y")
            # Columns 30/31 identify the owning firm; register it once.
            if int(f[30][1]) not in self.firms:
                tmp_fund.firm = Firm()
                tmp_fund.firm.name = f[31][1]
                tmp_fund.firm.backstop_id = int(f[30][1])
                self.firms[int(f[30][1])] = tmp_fund.firm
            self.funds[tmp_fund.backstop_id] = tmp_fund

        self.loaded[0] = True

    def loadProductData(self, as_of_date, load_from_cache=False):
        """Populate self.products (only products with a positive balance)."""
        if self.pg is not None:
            self.pg.startSubProcess(self.pg.cur_sub_proc_total * .4, 2.0)

        cp = self.cache_path
        if not load_from_cache:
            rep_def, rep_res = self._readReportPair("def/product_def.dat",
                                                    "def/product_res.dat")

            if self.pg is not None:
                self.pg.addMessage("Downloading Product Data")
                self.pg.incSub("running products report.....")

            product_rep = self.bsdm.runProductsReport(rep_def, rep_res,
                                                      as_of_date)
            product_bal = {}

            if self.pg is not None:
                self.pg.incSub("finished products report.....")
                self.pg.startSubProcess(self.pg.cur_sub_proc_total * .6,
                                        len(product_rep))

            # Current balance per product (queried as of today, not
            # as_of_date — preserved from the original behaviour).
            for p in product_rep:
                if self.pg is not None:
                    self.pg.incSub("getting " + p[1][1] + " data")
                bal = self.bsdm.getProductBalances(int(p[0][1]), date.today(),
                                                   date.today())
                product_bal[int(p[0][1])] = float(bal[0][1])

            product_data = [product_rep, product_bal]

            # Rebuild the per-product cache folder from scratch.
            shutil.rmtree(cp + "products/")
            os.mkdir(cp + "products/")

            self._dumpPickle(cp + "reports/product_rep.cache", product_data)
        else:
            with open(cp + "reports/product_rep.cache") as rep_file:
                product_data = cPickle.load(rep_file)
            product_rep = product_data[0]
            product_bal = product_data[1]

        for p in product_rep:
            if int(p[0][1]) not in self.products:
                tmp_product = Product()
                tmp_product.backstop_id = int(p[0][1])
                tmp_product.name = p[1][1]
                tmp_product.balance = product_bal[tmp_product.backstop_id]
                # Zero-balance products are excluded from the cache.
                if tmp_product.balance > 0:
                    self.products[int(p[0][1])] = tmp_product
        self.loaded[1] = True

    def loadContactsData(self, as_of_date):
        """Download and pickle the people/organisations report."""
        if not self.loaded[0]:
            self.loadFundData(as_of_date, True)
        if not self.loaded[1]:
            self.loadProductData(as_of_date, True)

        if self.pg is not None:
            self.pg.startSubProcess(self.pg.cur_sub_proc_total, 2.0)
            self.pg.addMessage("Downloading Contacts Data.....")
            self.pg.incSub("running people/orgs report.....")

        cp = self.cache_path
        rep_def, rep_res = self._readReportPair("def/people_orgs_def.dat",
                                                "def/people_orgs_res.dat")
        contact_data = self.bsdm.runPeopleOrgsReport(rep_def, rep_res,
                                                     as_of_date)

        if self.pg is not None:
            self.pg.incSub("finished people/orgs report.....")

        self._dumpPickle(cp + "reports/people_org_rep.cache", contact_data)
        self.loaded[2] = True

    def loadTransactionData(self, as_of_date):
        """Download and pickle the portfolio transactions report."""
        if not self.loaded[0]:
            self.loadFundData(as_of_date, True)
        if not self.loaded[1]:
            self.loadProductData(as_of_date, True)

        if self.pg is not None:
            self.pg.startSubProcess(self.pg.cur_sub_proc_total, 2.0)
            self.pg.addMessage("Downloading Transaction Data.....")
            self.pg.incSub("running transaction report.....")

        cp = self.cache_path
        rep_def, rep_res = self._readReportPair("def/port_trans_def.dat",
                                                "def/port_trans_res.dat")
        trans_data = self.bsdm.runPortfolioTransactionsReport(
            rep_def, rep_res, as_of_date)

        if self.pg is not None:
            self.pg.incSub("finished transaction report.....")

        self._dumpPickle(cp + "reports/trans_rep.cache", trans_data)
        self.loaded[3] = True

    def loadInternalTransactionData(self, as_of_date, product_name="None"):
        """Cache holding transactions per product.

        product_name: restrict to one product; the literal string
        "None" (not None) means all products.
        """
        if not self.loaded[0]:
            self.loadFundData(as_of_date, True)
        if not self.loaded[1]:
            self.loadProductData(as_of_date, True)

        if self.pg is not None:
            self.pg.startSubProcess(self.pg.cur_sub_proc_total,
                                    len(self.products))

        cp = self.cache_path
        for p in self.products.keys():
            if self.pg is not None:
                self.pg.incSub("getting holdings for " + self.products[p].name)
            if product_name == self.products[p].name or product_name == "None":
                prod_path = cp + "products/" + self.products[p].name
                if not os.access(prod_path + "/", os.F_OK):
                    os.mkdir(prod_path)
                trans_data = {}
                holding_ids = self.bsdm.getHoldingsInProduct(p)
                # History window starts at 1994-01-01 (before any data).
                for h_id in holding_ids:
                    tmp = self.bsdm.getHoldingTransactions(
                        h_id, date(1994, 1, 1), as_of_date)
                    info = self.bsdm.getHoldingInformation(h_id)
                    trans_data[h_id] = [tmp, info]

                self._dumpPickle(prod_path + "/transactions.cache",
                                 trans_data)
        self.loaded[4] = True

    def loadMeetingsData(self, as_of_date, firm_name="None"):
        """Cache organisation meeting info per firm ("None" = all firms)."""
        if not self.loaded[0]:
            self.loadFundData(as_of_date, True)
        if not self.loaded[1]:
            self.loadProductData(as_of_date, True)

        if self.pg is not None:
            self.pg.startSubProcess(self.pg.cur_sub_proc_total,
                                    len(self.firms))
            self.pg.addMessage("Downloading Meetings Data.....")

        cp = self.cache_path
        for f in self.firms.keys():
            if self.pg is not None:
                self.pg.incSub("getting meeting info for " +
                               self.firms[f].name)
            if firm_name == self.firms[f].name or firm_name == "None":
                firm_path = cp + "firms/" + self.firms[f].name
                if not os.access(firm_path + "/", os.F_OK):
                    os.mkdir(firm_path)
                meeting_data = self.bsdm.getMeetingInfoByBackstopIdandType(
                    f, "Organization")

                self._dumpPickle(firm_path + "/meeting.cache", meeting_data)
        self.loaded[5] = True

    def loadExposureData(self, as_of_date, fund_name="None"):
        """Cache portfolio-exposure rows per fund ("None" = all funds)."""
        if not self.loaded[0]:
            self.loadFundData(as_of_date, True)
        if not self.loaded[1]:
            self.loadProductData(as_of_date, True)

        if self.pg is not None:
            self.pg.startSubProcess(self.pg.cur_sub_proc_total,
                                    len(self.funds))
            self.pg.addMessage("Downloading Exposure Data.....")

        cp = self.cache_path
        for f in self.funds.keys():
            if self.pg is not None:
                self.pg.incSub("getting exposure data for " +
                               self.funds[f].name)
            if fund_name == self.funds[f].name or fund_name == "None":
                fund_path = cp + "funds/" + self.funds[f].name
                if not os.access(fund_path + "/", os.F_OK):
                    os.mkdir(fund_path)

                exp_data = self.bsdm.getExposureData(
                    self.funds[f].backstop_id,
                    self.funds[f].first_investment_date, as_of_date)

                # Keep only the "PORTFOLIO EXPOSURE" category rows.
                final_exp_data = []
                for cat in exp_data:
                    if (cat[0] == "PORTFOLIO EXPOSURE"
                            and cat[1] == "Portfolio Exposure"):
                        final_exp_data.append(cat[2])

                self._dumpPickle(fund_path + "/exposure_data.cache",
                                 final_exp_data)
        self.loaded[6] = True

    def loadHoldingsData(self, as_of_date, product_name="None"):
        """Cache holding info/balances per product ("None" = all)."""
        if not self.loaded[0]:
            self.loadFundData(as_of_date, True)
        if not self.loaded[1]:
            self.loadProductData(as_of_date, True)

        if self.pg is not None:
            self.pg.startSubProcess(self.pg.cur_sub_proc_total,
                                    len(self.products))
            self.pg.addMessage("Downloading Holdings Data.....")

        cp = self.cache_path
        for p in self.products.keys():
            if self.pg is not None:
                self.pg.incSub("getting holdings data for " +
                               self.products[p].name)
            if product_name == self.products[p].name or product_name == "None":
                fund_path = cp + "products/" + self.products[p].name
                if not os.access(fund_path + "/", os.F_OK):
                    os.mkdir(fund_path)
                holding_data = {}

                holding_ids = self.bsdm.getHoldingIds(
                    self.products[p].backstop_id)
                for h_id in holding_ids:
                    info = self.bsdm.getHoldingInformation(h_id)
                    bal = self.bsdm.getHoldingBalances(h_id, as_of_date,
                                                       as_of_date)
                    holding_data[h_id] = [info, bal]

                self._dumpPickle(fund_path + "/holding_data.cache",
                                 holding_data)
        self.loaded[7] = True

    def loadAumData(self, as_of_date, fund_name="None"):
        """Cache AUM history (since 1990) per fund ("None" = all funds)."""
        if not self.loaded[0]:
            self.loadFundData(as_of_date, True)
        if not self.loaded[1]:
            self.loadProductData(as_of_date, True)

        if self.pg is not None:
            self.pg.startSubProcess(self.pg.cur_sub_proc_total,
                                    len(self.funds))
            self.pg.addMessage("Downloading AUM Data.....")

        cp = self.cache_path
        for f in self.funds.keys():
            if self.pg is not None:
                self.pg.incSub("getting aum data for " + self.funds[f].name)
            if fund_name == self.funds[f].name or fund_name == "None":
                fund_path = cp + "funds/" + self.funds[f].name
                if not os.access(fund_path + "/", os.F_OK):
                    os.mkdir(fund_path)
                aum_data = self.bsdm.getHedgeFundAums(
                    self.funds[f].backstop_id, date(1990, 1, 1), as_of_date)

                self._dumpPickle(fund_path + "/aum_data.cache", aum_data)
        self.loaded[8] = True

    def loadReturnsData(self, as_of_date, fund_name="None"):
        """Cache monthly returns since inception per fund ("None" = all)."""
        if not self.loaded[0]:
            self.loadFundData(as_of_date, True)
        if not self.loaded[1]:
            self.loadProductData(as_of_date, True)

        if self.pg is not None:
            self.pg.startSubProcess(self.pg.cur_sub_proc_total,
                                    len(self.funds))
            self.pg.addMessage("Downloading Returns Data.....")

        cp = self.cache_path
        for f in self.funds.keys():
            if self.pg is not None:
                self.pg.incSub("getting returns data for " +
                               self.funds[f].name)
            if fund_name == self.funds[f].name or fund_name == "None":
                fund_path = cp + "funds/" + self.funds[f].name
                if not os.access(fund_path + "/", os.F_OK):
                    os.mkdir(fund_path)
                return_data = self.bsdm.getHedgeFundReturns(
                    self.funds[f].backstop_id, self.funds[f].incept_date,
                    as_of_date)

                self._dumpPickle(fund_path + "/return_data.cache",
                                 return_data)
        self.loaded[9] = True

    def loadStatisticsData(self, as_of_date, fund_name="None"):
        """Compute and cache summary statistics per fund ("None" = all).

        Each statistic is computed independently; a failure in one
        records "N/A" for that label rather than aborting the fund
        (the original used eleven copy-pasted bare ``except:`` blocks;
        they are collapsed into a table and narrowed to Exception).
        """
        if not self.loaded[0]:
            self.loadFundData(as_of_date, True)
        if not self.loaded[1]:
            self.loadProductData(as_of_date, True)

        if self.pg is not None:
            self.pg.startSubProcess(self.pg.cur_sub_proc_total,
                                    len(self.funds))
            self.pg.addMessage("Downloading statistics Data.....")

        cp = self.cache_path
        for f in self.funds.keys():
            if self.pg is not None:
                self.pg.incSub("getting statistics data for " +
                               self.funds[f].name)
            if fund_name == self.funds[f].name or fund_name == "None":
                fund = self.funds[f]
                fund_path = cp + "funds/" + self.funds[f].name
                if not os.access(fund_path + "/", os.F_OK):
                    os.mkdir(fund_path)
                stats = StatisticsCalculations(self.bsdm, f,
                                               fund.first_investment_date,
                                               as_of_date)
                # Label -> bound calculator method, in output order.
                stat_specs = [
                    ("Compound Return", stats.CompoundReturn),
                    ("Annualized Geometric Return",
                     stats.AnnualizedGeometricReturn),
                    ("Annualized Arithmetic Standard Deviation",
                     stats.AnnualizedArithmeticStandardDeviation),
                    ("Beta", stats.Beta),
                    ("Alpha", stats.AnnualizedAlpha),
                    ("Correlation Coefficient",
                     stats.CorrelationCoefficient),
                    ("RSquared", stats.RSquared),
                    ("Max Drawdown", stats.MaxDrawDown),
                    ("Sharpe Ratio", stats.SharpeRatio),
                    ("Annualized Up-Capture", stats.AnnualizedUpCapture),
                    ("Annualized Down-Capture",
                     stats.AnnualizedDownCapture),
                ]
                stats_data = {}
                for label, calc in stat_specs:
                    try:
                        stats_data[label] = [calc()]
                    except Exception:
                        stats_data[label] = ["N/A"]

                self._dumpPickle(fund_path + "/stats_data.cache", stats_data)
        self.loaded[10] = True
class MISCacheUpdater:
    def __init__(self,cache_path,pg=None):
        """Set up the MIS cache updater.

        cache_path: root folder for the on-disk cache.
        pg: optional progress-reporting object (may be None).
        """
        self.bsdm = BackStopDataManager()
        self.cache_path = cache_path
        self.pg = pg

        # Guarantee a trailing slash so later concatenations form paths.
        if self.cache_path[len(self.cache_path)-1] != "/":
            self.cache_path += "/"

        self.checkFolders()
        # One completion flag per load step, in update() order.
        self.loaded = [False,False,False,False,False,False,False,False,False,False,False]

        self.funds = {}
        self.firms = {}
        self.products = {}
    
    def checkFolders(self):
        """Create any missing top-level cache folders."""
        root = self.cache_path
        for sub in ("reports", "firms", "funds", "products"):
            if not os.access(root + sub, os.F_OK):
                os.makedirs(root + sub)
        
    def clearCache(self):
        """Delete the entire cache tree, then recreate the empty folders."""
        cp = self.cache_path
        if os.access(cp, os.F_OK):
            shutil.rmtree(cp)
        self.checkFolders()
    
    def syncCache(self,source_path):
        """Replace the local cache with a copy of *source_path*.

        Windows-only (xcopy via os.system).
        NOTE(review): source_path is pasted into a shell command line;
        only call this with trusted paths.
        """
        s_time = time.clock()
        self.clearCache()
        # Strip the trailing slash: xcopy wants the bare directory name.
        cmd_str = "xcopy \"" + source_path + "\" \"" + self.cache_path[0:len(self.cache_path)-1] + "\" /e /i /h /R /Y"
        os.system(cmd_str)
        print "Copy took",(time.clock() - s_time),"secs"
        
    def update(self,as_of_date=None):
        """Rebuild the entire cache as of *as_of_date* (default: today).

        Bug fix: the old default ``date.today()`` was evaluated once at
        class-definition time, so a long-lived process kept rebuilding
        with a stale date; the default is now resolved at call time.
        """
        if as_of_date is None:
            as_of_date = date.today()
        self.clearCache()

        self.loadFundData(as_of_date)
        self.loadProductData(as_of_date)
        self.loadContactsData(as_of_date)
        self.loadTransactionData(as_of_date)
        self.loadInternalTransactionData(as_of_date)
        self.loadMeetingsData(as_of_date)
        self.loadExposureData(as_of_date)
        self.loadHoldingsData(as_of_date)
        self.loadAumData(as_of_date)
        self.loadReturnsData(as_of_date)
        self.loadStatisticsData(as_of_date)
        
    def loadFundData(self,as_of_date,load_from_cache=False):
        """Populate self.funds / self.firms from the funds report.

        load_from_cache: reuse the previously pickled report instead of
        downloading a fresh one from Backstop.
        """
        if self.pg is not None:
            self.pg.startSubProcess(self.pg.cur_sub_proc_total,2.0)

        cp = self.cache_path
        if not load_from_cache:
            # Read def/res with 'with' so the handles are closed
            # (the old code leaked both file objects).
            with open("def/fund_def.dat") as f_def:
                rep_def = f_def.read()
            with open("def/fund_res.dat") as f_res:
                rep_res = f_res.read()

            if self.pg is not None:
                self.pg.addMessage("Downloading Fund Data")
                self.pg.incSub("running funds report.....")

            fund_data = self.bsdm.runFundsReport(rep_def,rep_res,as_of_date)

            if self.pg is not None:
                self.pg.incSub("finished funds report.....")

            with open(cp + "reports/fund_rep.cache",'w') as rep_file:
                cPickle.dump(fund_data,rep_file)
        else:
            with open(cp + "reports/fund_rep.cache") as rep_file:
                fund_data = cPickle.load(rep_file)

        for f in fund_data:
            tmp_fund = Fund()
            tmp_fund.backstop_id = int(f[0][1])
            tmp_fund.name = f[1][1]
            # Report columns: 3 = inception, 6 = first investment (m/d/Y).
            tmp_fund.first_investment_date = datetime.strptime(f[6][1],"%m/%d/%Y")
            tmp_fund.incept_date = datetime.strptime(f[3][1],"%m/%d/%Y")
            # Columns 30/31 identify the owning firm; register it once.
            if int(f[30][1]) not in self.firms:
                tmp_fund.firm = Firm()
                tmp_fund.firm.name = f[31][1]
                tmp_fund.firm.backstop_id = int(f[30][1])
                self.firms[int(f[30][1])] = tmp_fund.firm
            self.funds[tmp_fund.backstop_id] = tmp_fund

        self.loaded[0] = True
    
    def loadProductData(self,as_of_date,load_from_cache=False):
        """Populate self.products (only products with a positive balance).

        load_from_cache: reuse the previously pickled report instead of
        downloading a fresh one from Backstop.
        """
        if self.pg is not None:
            self.pg.startSubProcess(self.pg.cur_sub_proc_total*.4,2.0)

        cp = self.cache_path
        if not load_from_cache:
            # Read def/res with 'with' so the handles are closed
            # (the old code leaked both file objects).
            with open("def/product_def.dat") as f_def:
                rep_def = f_def.read()
            with open("def/product_res.dat") as f_res:
                rep_res = f_res.read()

            if self.pg is not None:
                self.pg.addMessage("Downloading Product Data")
                self.pg.incSub("running products report.....")

            product_rep = self.bsdm.runProductsReport(rep_def,rep_res,as_of_date)
            product_bal = {}

            if self.pg is not None:
                self.pg.incSub("finished products report.....")
                self.pg.startSubProcess(self.pg.cur_sub_proc_total*.6,len(product_rep))

            # Current balance per product (queried as of today, not
            # as_of_date — preserved from the original behaviour).
            for p in product_rep:
                if self.pg is not None:
                    self.pg.incSub("getting " + p[1][1] + " data")
                bal = self.bsdm.getProductBalances(int(p[0][1]),date.today(),date.today())
                product_bal[int(p[0][1])] = float(bal[0][1])

            product_data = [product_rep,product_bal]

            # Rebuild the per-product cache folder from scratch.
            shutil.rmtree(cp + "products/")
            os.mkdir(cp + "products/")

            with open(cp + "reports/product_rep.cache",'w') as rep_file:
                cPickle.dump(product_data,rep_file)
        else:
            with open(cp + "reports/product_rep.cache") as rep_file:
                product_data = cPickle.load(rep_file)
            product_bal = product_data[1]
            product_rep = product_data[0]

        for p in product_rep:
            if int(p[0][1]) not in self.products:
                tmp_product = Product()
                tmp_product.backstop_id = int(p[0][1])
                tmp_product.name = p[1][1]
                tmp_product.balance = product_bal[tmp_product.backstop_id]
                # Zero-balance products are excluded from the cache.
                if tmp_product.balance > 0:
                    self.products[int(p[0][1])] = tmp_product
        self.loaded[1] = True

    def loadContactsData(self,as_of_date):
        """Download and pickle the people/organisations report.

        Lazily reloads fund/product master data from the cache first if
        those steps have not run yet.
        """
        if not self.loaded[0]:
            self.loadFundData(as_of_date,True)
        if not self.loaded[1]:
            self.loadProductData(as_of_date,True)

        if self.pg is not None:
            self.pg.startSubProcess(self.pg.cur_sub_proc_total,2.0)
            self.pg.addMessage("Downloading Contacts Data.....")
            self.pg.incSub("running people/orgs report.....")

        cp = self.cache_path
        # Read def/res with 'with' so the handles are closed
        # (the old code leaked both file objects).
        with open("def/people_orgs_def.dat") as f_def:
            rep_def = f_def.read()
        with open("def/people_orgs_res.dat") as f_res:
            rep_res = f_res.read()
        contact_data = self.bsdm.runPeopleOrgsReport(rep_def,rep_res,as_of_date)

        if self.pg is not None:
            self.pg.incSub("finished people/orgs report.....")

        with open(cp + "reports/people_org_rep.cache",'w') as rep_file:
            cPickle.dump(contact_data,rep_file)
        self.loaded[2] = True
    
    def loadTransactionData(self,as_of_date):
        """Download and pickle the portfolio transactions report.

        Lazily reloads fund/product master data from the cache first if
        those steps have not run yet.
        """
        if not self.loaded[0]:
            self.loadFundData(as_of_date,True)
        if not self.loaded[1]:
            self.loadProductData(as_of_date,True)

        if self.pg is not None:
            self.pg.startSubProcess(self.pg.cur_sub_proc_total,2.0)
            self.pg.addMessage("Downloading Transaction Data.....")
            self.pg.incSub("running transaction report.....")

        cp = self.cache_path
        # Read def/res with 'with' so the handles are closed
        # (the old code leaked both file objects).
        with open("def/port_trans_def.dat") as f_def:
            rep_def = f_def.read()
        with open("def/port_trans_res.dat") as f_res:
            rep_res = f_res.read()
        trans_data = self.bsdm.runPortfolioTransactionsReport(rep_def,rep_res,as_of_date)

        if self.pg is not None:
            self.pg.incSub("finished transaction report.....")

        with open(cp + "reports/trans_rep.cache",'w') as rep_file:
            cPickle.dump(trans_data,rep_file)
        self.loaded[3] = True
        
    def loadInternalTransactionData(self,as_of_date,product_name="None"):
        """Fetch holding-level transactions for each product and cache them.

        For every product (or just the one matching *product_name*), pulls
        all holding transactions since 1994-01-01 from Backstop and pickles
        {holding_id: [transactions, info]} to
        <cache>/products/<name>/transactions.cache.
        """
        if not self.loaded[0]:
            self.loadFundData(as_of_date,True)
        if not self.loaded[1]:
            self.loadProductData(as_of_date,True)

        if self.pg != None:
            self.pg.startSubProcess(self.pg.cur_sub_proc_total,len(self.products.keys()))

        base = self.cache_path
        for prod_id in self.products.keys():
            product = self.products[prod_id]
            if self.pg != None:
                self.pg.incSub("getting holdings for " + product.name)
            if product_name not in (product.name, "None"):
                continue
            prod_dir = base + "products/" + product.name
            if not os.access(prod_dir + "/", os.F_OK):
                os.mkdir(prod_dir)
            holdings = {}
            for holding_id in self.bsdm.getHoldingsInProduct(prod_id):
                transactions = self.bsdm.getHoldingTransactions(holding_id, date(1994,1,1), as_of_date)
                details = self.bsdm.getHoldingInformation(holding_id)
                holdings[holding_id] = [transactions, details]

            out = open(prod_dir + "/transactions.cache", 'w')
            cPickle.dump(holdings, out)
            out.close()
        self.loaded[4] = True
                        
    def loadMeetingsData(self,as_of_date,firm_name="None"):
        """Download organization meeting info per firm and cache it.

        Processes every firm unless *firm_name* selects a single one; each
        firm's meetings are pickled to <cache>/firms/<name>/meeting.cache.
        """
        if not self.loaded[0]:
            self.loadFundData(as_of_date,True)
        if not self.loaded[1]:
            self.loadProductData(as_of_date,True)

        if self.pg != None:
            self.pg.startSubProcess(self.pg.cur_sub_proc_total,len(self.firms.keys()))
            self.pg.addMessage("Downloading Meetings Data.....")

        base = self.cache_path
        for firm_id in self.firms.keys():
            firm = self.firms[firm_id]
            if self.pg != None:
                self.pg.incSub("getting meeting info for " + firm.name)
            if firm_name not in (firm.name, "None"):
                continue
            firm_dir = base + "firms/" + firm.name
            if not os.access(firm_dir + "/", os.F_OK):
                os.mkdir(firm_dir)
            meetings = self.bsdm.getMeetingInfoByBackstopIdandType(firm_id, "Organization")

            out = open(firm_dir + "/meeting.cache", 'w')
            cPickle.dump(meetings, out)
            out.close()
        self.loaded[5] = True
                                
    def loadExposureData(self,as_of_date,fund_name="None"):
        """Download portfolio-exposure data per fund and cache it.

        Only categories labelled ("PORTFOLIO EXPOSURE", "Portfolio Exposure")
        are kept; their payloads are pickled to
        <cache>/funds/<name>/exposure_data.cache.
        """
        if not self.loaded[0]:
            self.loadFundData(as_of_date,True)
        if not self.loaded[1]:
            self.loadProductData(as_of_date,True)

        if self.pg != None:
            self.pg.startSubProcess(self.pg.cur_sub_proc_total,len(self.funds.keys()))
            self.pg.addMessage("Downloading Exposure Data.....")

        base = self.cache_path
        for fund_id in self.funds.keys():
            fund = self.funds[fund_id]
            if self.pg != None:
                self.pg.incSub("getting exposure data for " + fund.name)
            if fund_name not in (fund.name, "None"):
                continue
            fund_dir = base + "funds/" + fund.name
            if not os.access(fund_dir + "/", os.F_OK):
                os.mkdir(fund_dir)

            raw = self.bsdm.getExposureData(fund.backstop_id, fund.first_investment_date, as_of_date)
            filtered = [cat[2] for cat in raw
                        if cat[0] == "PORTFOLIO EXPOSURE" and cat[1] == "Portfolio Exposure"]

            out = open(fund_dir + "/exposure_data.cache", 'w')
            cPickle.dump(filtered, out)
            out.close()
        self.loaded[6] = True

    def loadHoldingsData(self,as_of_date,product_name="None"):
        """Download holding info and balances per product and cache them.

        Pickles {holding_id: [info, balance]} to
        <cache>/products/<name>/holding_data.cache for each product
        (or only *product_name*).
        """
        if not self.loaded[0]:
            self.loadFundData(as_of_date,True)
        if not self.loaded[1]:
            self.loadProductData(as_of_date,True)

        if self.pg != None:
            self.pg.startSubProcess(self.pg.cur_sub_proc_total,len(self.products.keys()))
            self.pg.addMessage("Downloading Holdings Data.....")

        base = self.cache_path
        for prod_id in self.products.keys():
            product = self.products[prod_id]
            if self.pg != None:
                self.pg.incSub("getting holdings data for " + product.name)
            if product_name not in (product.name, "None"):
                continue
            prod_dir = base + "products/" + product.name
            if not os.access(prod_dir + "/", os.F_OK):
                os.mkdir(prod_dir)

            holdings = {}
            for holding_id in self.bsdm.getHoldingIds(product.backstop_id):
                info = self.bsdm.getHoldingInformation(holding_id)
                balance = self.bsdm.getHoldingBalances(holding_id, as_of_date, as_of_date)
                holdings[holding_id] = [info, balance]

            out = open(prod_dir + "/holding_data.cache", 'w')
            cPickle.dump(holdings, out)
            out.close()
        self.loaded[7] = True
            
    def loadAumData(self,as_of_date,fund_name="None"):
        """Download each fund's AUM history (since 1990-01-01) from Backstop
        and pickle it to <cache>/funds/<name>/aum_data.cache."""
        if not self.loaded[0]:
            self.loadFundData(as_of_date,True)
        if not self.loaded[1]:
            self.loadProductData(as_of_date,True)

        if self.pg != None:
            self.pg.startSubProcess(self.pg.cur_sub_proc_total,len(self.funds.keys()))
            self.pg.addMessage("Downloading AUM Data.....")

        base = self.cache_path
        for fund_id in self.funds.keys():
            fund = self.funds[fund_id]
            if self.pg != None:
                self.pg.incSub("getting aum data for " + fund.name)
            if fund_name not in (fund.name, "None"):
                continue
            fund_dir = base + "funds/" + fund.name
            if not os.access(fund_dir + "/", os.F_OK):
                os.mkdir(fund_dir)
            aums = self.bsdm.getHedgeFundAums(fund.backstop_id, date(1990,1,1), as_of_date)

            out = open(fund_dir + "/aum_data.cache", 'w')
            cPickle.dump(aums, out)
            out.close()
        self.loaded[8] = True
        
    def loadReturnsData(self,as_of_date,fund_name="None"):
        """Download each fund's return history (from its inception date) and
        pickle it to <cache>/funds/<name>/return_data.cache."""
        if not self.loaded[0]:
            self.loadFundData(as_of_date,True)
        if not self.loaded[1]:
            self.loadProductData(as_of_date,True)

        if self.pg != None:
            self.pg.startSubProcess(self.pg.cur_sub_proc_total,len(self.funds.keys()))
            self.pg.addMessage("Downloading Returns Data.....")

        base = self.cache_path
        for fund_id in self.funds.keys():
            fund = self.funds[fund_id]
            if self.pg != None:
                self.pg.incSub("getting returns data for " + fund.name)
            if fund_name not in (fund.name, "None"):
                continue
            fund_dir = base + "funds/" + fund.name
            if not os.access(fund_dir + "/", os.F_OK):
                os.mkdir(fund_dir)
            returns = self.bsdm.getHedgeFundReturns(fund.backstop_id, fund.incept_date, as_of_date)

            out = open(fund_dir + "/return_data.cache", 'w')
            cPickle.dump(returns, out)
            out.close()
        self.loaded[9] = True
    
    def loadStatisticsData(self,as_of_date,fund_name="None"):
        """Compute per-fund statistics and cache them to disk.

        For each fund (or only *fund_name*), builds a StatisticsCalculations
        object over [first_investment_date, as_of_date] and pickles
        {label: [value]} to <cache>/funds/<name>/stats_data.cache.
        A statistic that raises is stored as ["N/A"] so one failure does not
        abort the rest.
        """
        if not self.loaded[0]:
            self.loadFundData(as_of_date,True)
        if not self.loaded[1]:
            self.loadProductData(as_of_date,True)

        if self.pg != None:
            self.pg.startSubProcess(self.pg.cur_sub_proc_total,len(self.funds.keys()))
            self.pg.addMessage("Downloading statistics Data.....")

        cp = self.cache_path
        for f in self.funds.keys():
            if self.pg != None:
                self.pg.incSub("getting statistics data for " + self.funds[f].name)
            if fund_name == self.funds[f].name or fund_name == "None":
                fund = self.funds[f]
                fund_path = cp + "funds/" + self.funds[f].name
                if not os.access(fund_path +"/",os.F_OK):
                    os.mkdir(fund_path)
                stats = StatisticsCalculations(self.bsdm,f,fund.first_investment_date,as_of_date)
                # Label/calculator pairs replace the 11 copy-pasted
                # try/except blocks of the original.
                stat_specs = [
                    ("Compound Return", stats.CompoundReturn),
                    ("Annualized Geometric Return", stats.AnnualizedGeometricReturn),
                    ("Annualized Arithmetic Standard Deviation", stats.AnnualizedArithmeticStandardDeviation),
                    ("Beta", stats.Beta),
                    ("Alpha", stats.AnnualizedAlpha),
                    ("Correlation Coefficient", stats.CorrelationCoefficient),
                    ("RSquared", stats.RSquared),
                    ("Max Drawdown", stats.MaxDrawDown),
                    ("Sharpe Ratio", stats.SharpeRatio),
                    ("Annualized Up-Capture", stats.AnnualizedUpCapture),
                    ("Annualized Down-Capture", stats.AnnualizedDownCapture),
                ]
                stats_data = {}
                for label, calc in stat_specs:
                    try:
                        stats_data[label] = [calc()]
                    except Exception:
                        # Best-effort: record the failure and keep going.
                        stats_data[label] = ["N/A"]
                rep_file = open(fund_path + "/stats_data.cache",'w')
                try:
                    cPickle.dump(stats_data,rep_file)
                finally:
                    rep_file.close()
        self.loaded[10] = True
class StatsCalculator:
    def __init__(self, cache_path, backstop_only = False):
        """Set up the calculator.

        cache_path    -- root directory of the on-disk caches; a trailing
                         "/" is appended if missing.
        backstop_only -- when True, skip the local returns cache and pull
                         every series straight from Backstop.
        """
        self.backstop_only = backstop_only
        self.cache_path = cache_path

        # Normalize so later path joins can simply concatenate.
        # endswith() also handles an empty string (the original indexed
        # [-1] and would raise IndexError).
        if not self.cache_path.endswith("/"):
            self.cache_path += "/"

        self.bsdm = BackStopDataManager()

        self.returns_index = {}

        if not self.backstop_only:
            self.loadCachedReturns()
        else:
            # Backstop-only mode keeps a sentinel entry instead of the
            # on-disk cache.  (The original redundantly re-assigned
            # self.returns_index = {} here.)
            self.returns_index[""] = {}
            
    def loadCachedReturns(self):
        """Replace self.returns_index with the pickled returns cache, if one
        exists on disk; otherwise leave it untouched."""
        cache_file = self.cache_path + "returns/returns.cache"
        if os.access(cache_file, os.F_OK):
            handle = open(cache_file)
            self.returns_index = cPickle.load(handle)
            handle.close()
            
    def calcStatisticsFromList(self,fund_ids, start_date, end_date):
        """Compute the statistics vector for each fund id in *fund_ids*.

        The benchmark (496539) and risk-free (496605) series are loaded
        once, then each fund's series is loaded and gatherStats() run.
        Returns {fund_id: stats_list}.
        """
        def month_span(first, last):
            # Yield (year, month) for every calendar month from first
            # through last inclusive; replaces the four duplicated
            # year-boundary branches of the original.
            y, m = first.year, first.month
            while (y, m) <= (last.year, last.month):
                yield y, m
                y, m = (y + 1, 1) if m == 12 else (y, m + 1)

        def load_backstop_series(series_id, rmf, rf):
            # Append the (1+r) multiple and raw return r for each month
            # reported by Backstop.
            returns = self.bsdm.getHedgeFundReturns(series_id, start_date, end_date)
            if returns != False:
                for month in returns:
                    for item in month:
                        if item[0] == "amount":
                            rmf.append(item[1] + 1)
                            rf.append(item[1])

        self.benchmark_returns_RMF = []
        self.benchmark_returns_RF = []
        self.risk_free_returns_RMF = []
        self.risk_free_returns_RF = []

        stats_index = {}

        if self.backstop_only:
            load_backstop_series(496539, self.benchmark_returns_RMF, self.benchmark_returns_RF)
            load_backstop_series(496605, self.risk_free_returns_RMF, self.risk_free_returns_RF)
        else:
            for y, m in month_span(start_date, end_date):
                self.addMonthOfReturns(496539,y,m,1)   # benchmark series
                self.addMonthOfReturns(496605,y,m,2)   # risk-free series

        for fund_id in fund_ids:
            self.fund_returns_RMF = []
            self.fund_returns_RF = []
            if self.backstop_only:
                load_backstop_series(fund_id, self.fund_returns_RMF, self.fund_returns_RF)
            else:
                for y, m in month_span(start_date, end_date):
                    self.addMonthOfReturns(fund_id,y,m,3)

            stats_index[fund_id] = self.gatherStats()
        return stats_index
                        
    def calcStatistics(self, fund_id, start_date, end_date):
        """Compute the statistics vector for a single fund.

        Loads the benchmark (496539), risk-free (496605) and fund return
        series over [start_date, end_date] — from Backstop directly in
        backstop_only mode, otherwise via the local returns cache — then
        delegates to gatherStats().
        """
        def month_span(first, last):
            # Yield (year, month) for every calendar month from first
            # through last inclusive; replaces the four duplicated
            # year-boundary branches of the original.
            y, m = first.year, first.month
            while (y, m) <= (last.year, last.month):
                yield y, m
                y, m = (y + 1, 1) if m == 12 else (y, m + 1)

        def load_backstop_series(series_id, rmf, rf):
            # Append the (1+r) multiple and raw return r for each month
            # reported by Backstop.
            returns = self.bsdm.getHedgeFundReturns(series_id, start_date, end_date)
            if returns != False:
                for month in returns:
                    for item in month:
                        if item[0] == "amount":
                            rmf.append(item[1] + 1)
                            rf.append(item[1])

        self.fund_returns_RMF = []
        self.fund_returns_RF = []
        self.benchmark_returns_RMF = []
        self.benchmark_returns_RF = []
        self.risk_free_returns_RMF = []
        self.risk_free_returns_RF = []

        if self.backstop_only:
            load_backstop_series(496539, self.benchmark_returns_RMF, self.benchmark_returns_RF)
            load_backstop_series(496605, self.risk_free_returns_RMF, self.risk_free_returns_RF)
            load_backstop_series(fund_id, self.fund_returns_RMF, self.fund_returns_RF)
        else:
            for y, m in month_span(start_date, end_date):
                self.addMonthOfReturns(496539,y,m,1)   # benchmark series
                self.addMonthOfReturns(496605,y,m,2)   # risk-free series
                self.addMonthOfReturns(fund_id,y,m,3)  # fund series
        return self.gatherStats()
    
    def gatherStats(self):       
        stats = []
        try:
            stats.append(self.AnnualizedGeometricReturn())
        except:
            stats.append("N/A")
            pass
        try:
            stats.append(self.CompoundReturn())
        except:
            stats.append("N/A")
            pass
        try:
            stats.append(self.AnnualizedArithmeticStandardDeviation())
        except:
            stats.append("N/A")
            pass
        try:
            stats.append(self.MaxDrawDown())
        except:
            stats.append("N/A")
            pass
        try:
            stats.append(self.SecondMaxDrawDown())
        except:
            stats.append("N/A")
            pass
        try:
            stats.append(self.SharpeRatio())
        except:
            stats.append("N/A")
            pass
        try:
            stats.append(self.CumulativeUpCapture())
        except:
            stats.append("N/A")
            pass
        try:
            stats.append(self.CumulativeDownCapture())
        except:
            stats.append("N/A")
            pass
        try:
            stats.append(self.AnnualizedUpCapture())
        except:
            stats.append("N/A")
            pass
        try:
            stats.append(self.AnnualizedDownCapture())
        except:
            stats.append("N/A")
            pass
        try:
            stats.append(self.PercentPeriodsPositve())
        except:
            stats.append("N/A")
            pass
        try:
            stats.append(self.PercentPeriodsNegative())
        except:
            stats.append("N/A")
            pass
        try:
            stats.append(self.AverageReturn())
        except:
            stats.append("N/A")
            pass
        try:
            stats.append(self.HighestSinglePeriodReturn())
        except:
            stats.append("N/A")
            pass
        try:
            stats.append(self.LowestSinglePeriodReturn())
        except:
            stats.append("N/A")
            pass
        try:
            stats.append(self.AverageGainInPositivePeriod())
        except:
            stats.append("N/A")
            pass
        try:
            stats.append(self.AverageLossInLossPeriod())
        except:
            stats.append("N/A")
            pass
        try:
            stats.append(self.AnnualizedAlpha())
        except:
            stats.append("N/A")
            pass
        try:
            stats.append(self.Beta())
        except:
            stats.append("N/A")
            pass
        try:
            stats.append(self.CorrelationCoefficient())
        except:
            stats.append("N/A")
            pass
        try:
            stats.append(self.RSquared())
        except:
            stats.append("N/A")
            pass
        return stats 
    
    def addMonthOfReturns(self,fund_id,year,month,type):
        from_BS = False
        
        if year not in self.returns_index.keys():
            from_BS = True
        elif month not in self.returns_index[year].keys():
            from_BS = True
        elif fund_id not in self.returns_index[year][month].keys():
            from_BS = True
        
        if from_BS:
            tmp_returns = self.bsdm.getHedgeFundReturns(fund_id, date(year,month,15), date(year,month,15))
            if tmp_returns != False:
                for month in tmp_returns:
                    for item in month:
                        if item[0] == "amount":
                            if type == 3:
                                self.fund_returns_RMF.append(item[1] + 1)     
                                self.fund_returns_RF.append(item[1])
                            elif type == 2:
                                self.risk_free_returns_RMF.append(item[1] + 1)     
                                self.risk_free_returns_RF.append(item[1])
                            else:
                                self.benchmark_returns_RMF.append(item[1] + 1)     
                                self.benchmark_returns_RF.append(item[1])
        else:
            def_values = False
            if fund_id in self.returns_index[year][month].keys():
                if "RMF" in self.returns_index[year][month][fund_id].keys():
                    if type == 3:
                        self.fund_returns_RMF.append(self.returns_index[year][month][fund_id]["RMF"])     
                        self.fund_returns_RF.append(self.returns_index[year][month][fund_id]["RF"])
                    elif type == 2:
                        self.risk_free_returns_RMF.append(self.returns_index[year][month][fund_id]["RMF"])     
                        self.risk_free_returns_RF.append(self.returns_index[year][month][fund_id]["RF"])
                    else:
                        self.benchmark_returns_RMF.append(self.returns_index[year][month][fund_id]["RMF"])     
                        self.benchmark_returns_RF.append(self.returns_index[year][month][fund_id]["RF"])
                else:
                    def_values = True 
            else:
                def_values = True
            
            if def_values: 
                if type == 3:
                    self.fund_returns_RMF.append(1.0)     
                    self.fund_returns_RF.append(0.0)
                elif type == 2:
                    self.risk_free_returns_RMF.append(1.0)     
                    self.risk_free_returns_RF.append(0.0)
                else:
                    self.benchmark_returns_RMF.append(1.0)     
                    self.benchmark_returns_RF.append(0.0)
                           

    def AnnualizedGeometricReturn(self):
        if len(self.fund_returns_RMF) < 12:
            return self.CompoundReturn()
        else:
            annual_geo_return = 1.0
            for rtrn in self.fund_returns_RMF:
                annual_geo_return = annual_geo_return * rtrn
            annual_geo_return = (annual_geo_return**(12.0 / len(self.fund_returns_RMF)) - 1)
            return annual_geo_return
    
    def AnnGeometricReturn(self,fund_returns_RMF):
        """Annualized geometric return of an arbitrary (1+r) series."""
        product = 1.0
        for multiple in fund_returns_RMF:
            product *= multiple
        return product ** (12.0 / len(fund_returns_RMF)) - 1
    
    def AnnualizedArithmeticStandardDeviation(self):
        """Arithmetic standard deviation of the fund series, annualized
        (multiplied by sqrt(12)) once at least 12 months of history exist."""
        monthly = self.ArithmeticStandardDeviation(self.fund_returns_RMF)
        if len(self.fund_returns_RMF) < 12:
            return monthly
        return monthly * 12**.5

    def CompoundReturn(self):
        compound_return = 1.0
        for rtrn in self.fund_returns_RMF:
            compound_return = compound_return * rtrn
        compound = compound_return - 1
        return compound
    
    def CmpndReturn(self, fund_returns_RMF):
        """Compound return of an arbitrary (1+r) series (product minus 1)."""
        total = 1.0
        for multiple in fund_returns_RMF:
            total *= multiple
        return total - 1
    
    def MaxDrawDown(self):
        sudo_drawdown, return_multiple = 1.0,1.0
        count = 0.0
        drawdown_collection, date_collection = [],[]
        for rtrn in self.fund_returns_RF:
            return_multiple = 1 + rtrn
            if rtrn < 0:
                sudo_drawdown = sudo_drawdown * return_multiple
            if return_multiple > 1:
                if return_multiple + sudo_drawdown > 1:
                    sudo_drawdown = sudo_drawdown * return_multiple
            if sudo_drawdown < 1:
                drawdown_collection.append(sudo_drawdown)
                date_collection.append(count)
            if sudo_drawdown > 1:
                sudo_drawdown = 1
            count = count + 1
        if len(drawdown_collection) == 0:
            return 0.0
        else:
            drawdown = (self.Min(drawdown_collection) - 1)
            return drawdown
    
    def SecondMaxDrawDown(self):
        """Return the second-largest drawdown (as a negative fraction) whose
        date range does not overlap the maximum drawdown's range; 0.0 when
        no such drawdown exists.
        """
        # Running product of loss multiples; resets to 1 once recovered.
        sudo_drawdown, return_multiple = 1.0,1.0
        count = 0
        drawdown_collection, date_collection = [],[]
        for rtrn in self.fund_returns_RF:
            return_multiple = 1 + rtrn
            if rtrn < 0:
                sudo_drawdown = sudo_drawdown * return_multiple
            if return_multiple > 1:
                # Partial recovery: keep compounding while still under water.
                if return_multiple + sudo_drawdown > 1:
                    sudo_drawdown = sudo_drawdown * return_multiple
            if sudo_drawdown < 1:
                drawdown_collection.append(sudo_drawdown)
                date_collection.append(count)
            if sudo_drawdown > 1:
                sudo_drawdown = 1
            count = count + 1
            
        def getDrawdownRange(drawdown, drawdown_collection, date_collection, fund_returns_RF):
            # Walk backwards from the trough, recompounding returns until the
            # drawdown value is reproduced (within a float tolerance), to
            # recover the [start, end] index range of that drawdown.
            end_pos = drawdown_collection.index(drawdown)
            end_pos = date_collection[end_pos]
            tmp = 1.0
            start_pos = 0
            for index in xrange(end_pos,0,-1):
                tmp = tmp * (1 + fund_returns_RF[index])
                if abs(tmp - drawdown) < 0.000001:
                    start_pos = index
                    break
            return [start_pos, end_pos]
        
        if len(drawdown_collection) == 0:
            return 0.0
        maxdraw = self.Min(drawdown_collection) 
        max_range = getDrawdownRange(maxdraw, drawdown_collection, date_collection, self.fund_returns_RF)
        new_drawdown_collection = drawdown_collection[:]
        sec_draw = 0.0
        # Discard candidate drawdowns overlapping the max drawdown's range;
        # the first non-overlapping candidate is the second-max.
        for i in range(len(drawdown_collection)):
            min = self.Min(new_drawdown_collection)  # NOTE(review): shadows builtin min
            min_range = getDrawdownRange(min, drawdown_collection, date_collection, self.fund_returns_RF)
            if min_range[0] >= max_range[0] and min_range[0] <= max_range[1] or \
            min_range[1] >= max_range[0] and min_range[1] <= max_range[1]:
                new_drawdown_collection.remove(min)
            else:
                sec_draw = min
                break
        if len(new_drawdown_collection) == 0:
            return 0.0
        else:
            return sec_draw - 1
    
    def SharpeRatio(self):
        """Annualized geometric excess return over the risk-free series,
        divided by the annualized standard deviation; negative ratios are
        reported as "N/A"."""
        excess = [self.fund_returns_RMF[i] - self.risk_free_returns_RMF[i] + 1
                  for i in range(len(self.fund_returns_RMF))]
        ratio = self.AnnGeometricReturn(excess) / self.AnnualizedArithmeticStandardDeviation()
        return "N/A" if ratio < 0 else ratio

    def CumulativeUpCapture(self):
        benchmark_returns, fund_returns = [],[]
        for point in range(len(self.benchmark_returns_RMF)):
            if self.benchmark_returns_RMF[point] > 1:
                benchmark_returns.append(self.benchmark_returns_RMF[point])
                fund_returns.append(self.fund_returns_RMF[point])        
        cummulative_up_cap = self.CmpndReturn(fund_returns) / self.CmpndReturn(benchmark_returns)
        return cummulative_up_cap
    
    def CumulativeDownCapture(self):
        benchmark_returns, fund_returns = [],[]
        for point in range(len(self.benchmark_returns_RMF)):
            if self.benchmark_returns_RMF[point] < 1:
                benchmark_returns.append(self.benchmark_returns_RMF[point])
                fund_returns.append(self.fund_returns_RMF[point])        
        cummulative_down_cap = self.CmpndReturn(fund_returns) / self.CmpndReturn(benchmark_returns)
        return cummulative_down_cap
    
    def AnnualizedUpCapture(self):
        benchmark_positive_returns, fund_returns = [],[]
        for point in range(len(self.benchmark_returns_RMF)):
            if self.benchmark_returns_RMF[point] > 1:
                benchmark_positive_returns.append(self.benchmark_returns_RMF[point])
                fund_returns.append(self.fund_returns_RMF[point])
        annualized_up_cap = self.AnnGeometricReturn(fund_returns) / self.AnnGeometricReturn(benchmark_positive_returns)
        if len(benchmark_positive_returns) < 12:
            return self.CumulativeUpCapture()
        return annualized_up_cap
    
    def AnnualizedDownCapture(self):
        """Annualized down-capture ratio.

        Over periods where the benchmark RMF multiplier is below 1, divide
        the fund's annualized geometric return by the benchmark's.  With
        fewer than 12 down periods, fall back to the cumulative measure.
        """
        benchmark_negative_returns, fund_returns = [], []
        for point in range(len(self.benchmark_returns_RMF)):
            if self.benchmark_returns_RMF[point] < 1:
                benchmark_negative_returns.append(self.benchmark_returns_RMF[point])
                fund_returns.append(self.fund_returns_RMF[point])
        # BUG FIX: guard BEFORE dividing (see AnnualizedUpCapture) -- the
        # original computed the ratio unconditionally and could raise on a
        # short sample even though the fallback was about to be returned.
        if len(benchmark_negative_returns) < 12:
            return self.CumulativeDownCapture()
        return self.AnnGeometricReturn(fund_returns) / self.AnnGeometricReturn(benchmark_negative_returns)
    
    def PercentPeriodsPositve(self):
        """Fraction of RF periods with a strictly positive fund return.

        NOTE: the historical 'Positve' spelling is kept because callers
        reference the method by this name.
        """
        positive_count = sum(1.0 for rtrn in self.fund_returns_RF if rtrn > 0)
        return positive_count / len(self.fund_returns_RF)
    
    def PercentPeriodsNegative(self):
        """Fraction of RF periods with a strictly negative fund return."""
        negative_count = sum(1.0 for rtrn in self.fund_returns_RF if rtrn < 0)
        return negative_count / len(self.fund_returns_RF)
    
    def AverageReturn(self):
        """Arithmetic mean of the fund's plain (RF) return series."""
        return self.Average(self.fund_returns_RF)
    
    def HighestSinglePeriodReturn(self):
        """Largest single-period fund return in the RF series."""
        return self.Max(self.fund_returns_RF)
    
    def LowestSinglePeriodReturn(self):
        """Smallest single-period fund return in the RF series."""
        return self.Min(self.fund_returns_RF)
    
    def AverageGainInPositivePeriod(self):
        """Average fund return taken over positive RF periods only.

        Returns 0.0 when the fund has no positive periods.  BUG FIX: the
        original passed an empty list to Average in that case (a
        ZeroDivisionError); this mirrors the empty-sample guard already
        present in AverageLossInLossPeriod.
        """
        returns_in_positive = [rtrn for rtrn in self.fund_returns_RF if rtrn > 0]
        if len(returns_in_positive) == 0:
            return 0.0
        return self.Average(returns_in_positive)
    
    def AverageLossInLossPeriod(self):
        """Average fund return over losing RF periods, or 0.0 when the
        fund has no negative periods."""
        losing_periods = [rtrn for rtrn in self.fund_returns_RF if rtrn < 0]
        if not losing_periods:
            return 0.0
        return self.Average(losing_periods)
    
    def Beta(self):
        """Regression slope of fund (RF) returns against benchmark (RF)
        returns: the covariance term divided by the benchmark variance
        term (both un-normalized, so the shared 1/(n-1) cancels)."""
        bench_mean = self.Average(self.benchmark_returns_RF)
        fund_mean = self.Average(self.fund_returns_RF)
        covariance_sum = 0.0
        variance_sum = 0.0
        for point, fund_val in enumerate(self.fund_returns_RF):
            bench_dev = self.benchmark_returns_RF[point] - bench_mean
            covariance_sum = covariance_sum + (fund_val - fund_mean) * bench_dev
            variance_sum = variance_sum + bench_dev ** 2
        return covariance_sum / variance_sum
        
    def Alpha(self):
        """Regression intercept expressed in percent: mean fund return
        minus beta times mean benchmark return, scaled by 100."""
        beta = self.Beta()
        bench_mean = self.Average(self.benchmark_returns_RF)
        fund_mean = self.Average(self.fund_returns_RF)
        return (fund_mean - bench_mean * beta) * 100
    
    def AnnualizedAlpha(self):
        """Alpha scaled to a yearly figure (x12); with fewer than 12 RF
        periods the per-period alpha is returned unscaled."""
        period_alpha = self.Alpha()
        if len(self.fund_returns_RF) >= 12:
            return period_alpha * 12.0
        return period_alpha
    
    def CorrelationCoefficient(self):
        """Pearson correlation between the fund and benchmark RF series,
        normalized with the sample (n-1) convention to match
        ArithmeticStandardDeviation."""
        fund_mean = self.Average(self.fund_returns_RF)
        bench_mean = self.Average(self.benchmark_returns_RF)
        cross_sum = 0.0
        for point, fund_val in enumerate(self.fund_returns_RF):
            cross_sum = cross_sum + (self.benchmark_returns_RF[point] - bench_mean) * (fund_val - fund_mean)
        fund_stdev = self.ArithmeticStandardDeviation(self.fund_returns_RF)
        bench_stdev = self.ArithmeticStandardDeviation(self.benchmark_returns_RF)
        return cross_sum / ((len(self.fund_returns_RF) - 1) * fund_stdev * bench_stdev)
    
    def RSquared(self):
        """Coefficient of determination: the squared correlation."""
        correlation = self.CorrelationCoefficient()
        return correlation ** 2
    
    def AlphaGivenBeta(self, beta):
        """Alpha (in percent) computed from a caller-supplied beta rather
        than the internally regressed one -- see Alpha()."""
        bench_mean = self.Average(self.benchmark_returns_RF)
        fund_mean = self.Average(self.fund_returns_RF)
        return (fund_mean - bench_mean * beta) * 100
  
    def RSquaredGivenR(self, R):
        """R-squared from a caller-supplied correlation coefficient."""
        r_squared = R ** 2
        return r_squared
    
    def ArithmeticStandardDeviation(self, data_set):
        """Sample standard deviation of data_set (n-1 denominator).

        The running sum starts at 0.0 so integer inputs still divide as
        floats under Python 2.
        """
        mean = sum(data_set, 0.0) / len(data_set)
        squared_deviations = 0
        for value in data_set:
            squared_deviations = squared_deviations + (value - mean) ** 2
        return (squared_deviations / (len(data_set) - 1)) ** .5
    
    def Min(self, data_set):
        """Smallest element of data_set (assumes at least one element;
        raises IndexError on an empty sequence, like the original)."""
        smallest = data_set[0]
        for candidate in data_set[1:]:
            if candidate < smallest:
                smallest = candidate
        return smallest
    
    def Max(self, data_set):
        """Largest element of data_set (assumes at least one element;
        raises IndexError on an empty sequence, like the original)."""
        largest = data_set[0]
        for candidate in data_set[1:]:
            if candidate > largest:
                largest = candidate
        return largest
        
    def Average(self, data_set):
        """Arithmetic mean of data_set.  The 0.0 start value keeps the
        division a float division even for integer input under Python 2."""
        return sum(data_set, 0.0) / len(data_set)
      
    def FormatPercentage(self, number):
        """Render a fractional value as a percent string with two decimal
        places, e.g. 0.1234 -> '12.34%'."""
        scaled = number * 100.0
        return "%.2f" % round(scaled, 2) + "%"
    
    def FormatNonPercentage(self, number):
        """Round to four decimal places and render as a plain string."""
        rounded = round(number, 4)
        return str(rounded)
    
    def DateConversion(self, date): #date object
        # Render as an ISO-8601-style timestamp pinned to midnight with a
        # hard-coded -05:00 offset (presumably US Eastern standard time --
        # TODO confirm, no DST handling here).  NOTE: the parameter name
        # shadows the datetime.date class inside this method.
        return date.strftime("%Y-%m-%dT00:00:00-05:00")
class StatsCacheUpdater:
    def __init__(self,
                 cache_path=("C:/Documents and Settings/" +
                             os.getenv("username") + "/cache_root/Stats")):
        if cache_path[len(cache_path) -
                      1] != "/" and cache_path[len(cache_path) - 1] != "\\":
            cache_path += "/"
        self.cache_path = cache_path
        t = date.today()
        self.cur_date = date(t.year, t.month, 15)

        self.get = BackStopDataManager()
        self.months = [
            "Dec", "Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug",
            "Sep", "Oct", "Nov"
        ]
        self.checkFolders()
        self.loadData()

    def syncCache(self, source_path):
        cmd_str = "xcopy \"" + source_path + "\" \"" + self.cache_path[
            0:len(self.cache_path) - 1] + "\" /e /i /h"
        os.system(cmd_str)

    def checkFolders(self):
        cp = self.cache_path
        if not os.access(cp + "returns", os.F_OK):
            os.makedirs(cp + "returns")
        if not os.access(cp + "lists", os.F_OK):
            os.makedirs(cp + "lists")
        if not os.access(cp + "pg_lists", os.F_OK):
            os.makedirs(cp + "pg_lists")

    def loadData(self):
        self.returns_index = {}
        self.fund_return_dates = {}
        if os.access(self.cache_path + "returns/returns.cache", os.F_OK):
            ret_cache_file = open(self.cache_path + "returns/returns.cache")
            self.returns_index = cPickle.load(ret_cache_file)
            ret_cache_file.close()
            self.loadFundReturnDates()

    def loadFundReturnDates(self):
        for year in self.returns_index.keys():
            for month in self.returns_index[year].keys():
                for id in self.returns_index[year][month].keys():
                    if id not in self.fund_return_dates.keys():
                        self.fund_return_dates[id] = []
                    if "RMF" in self.returns_index[year][month][id].keys():
                        if date(year, month,
                                1) not in self.fund_return_dates[id]:
                            self.fund_return_dates[id].append(
                                date(year, month, 1))

    def updateReturns(self, as_of_date=date.today()):
        if self.equalOrAfter(as_of_date, date.today()):
            as_of_date = date.today()

        if not os.access(self.cache_path + "lists/active_funds.cache",
                         os.F_OK):
            self.updateLists()

        tmp_file = open(self.cache_path + "lists/active_funds.cache")
        active_funds = cPickle.load(tmp_file)
        tmp_file.close()

        benchmarks = [496539, 496605]

        returns = {}

        for b_id in benchmarks:
            tmp_returns = self.get.getHedgeFundReturns(b_id, as_of_date,
                                                       as_of_date)
            if tmp_returns != False:
                returns[b_id] = {}
                for month in tmp_returns:
                    if str(month[5][1]) != "not_existent":
                        returns[b_id]["RMF"] = month[0][1] + 1
                        returns[b_id]["RF"] = month[0][1]

        for fund_var in range(0, len(active_funds)):
            if fund_var % 25 == 0:
                print 100.0 * float(fund_var) / float(len(active_funds)), "%"
            fund = active_funds[fund_var]
            tmp_returns = self.get.getHedgeFundReturns(fund.ID, as_of_date,
                                                       as_of_date)
            if tmp_returns != False:
                returns[fund.ID] = {}
                for month in tmp_returns:
                    if str(month[5][1]) != "not_existent":
                        returns[fund.ID]["RMF"] = month[0][1] + 1
                        returns[fund.ID]["RF"] = month[0][1]

        if not as_of_date.year in self.returns_index.keys():
            self.returns_index[as_of_date.year] = {}

        self.returns_index[as_of_date.year][as_of_date.month] = returns

        self.loadFundReturnDates()

        ret_file = open(self.cache_path + "returns/returns.cache", 'w')
        cPickle.dump(self.returns_index, ret_file)
        ret_file.close()

    def updatePGList(self):
        ids = self.get.getPeerGroupIds()
        id_name_map = dict()
        for id in ids:
            if "{Stats}" in id[1]:
                id_name_map[id[0]] = id[1]

        file = open(
            self.cache_path + "pg_lists/cache-peer_group_id_name_map.cache",
            "w")
        cPickle.dump(id_name_map, file)
        file.close()

        pg_member_ids = {}
        for pg_id in id_name_map.keys():
            info = self.get.getPeerGroupMemberIds(pg_id, date.today())
            id_list = []
            for i in info:
                id_list.append(i[0])
            pg_member_ids[pg_id] = id_list

        file = open(
            self.cache_path + "pg_lists/cache-peer_group_member_id.cache", "w")
        cPickle.dump(pg_member_ids, file)
        file.close()

    def updateMCPFAIList(self):
        MCPFAI = []

        file_def = open("def/MCPFAI_def.dat")
        file_res = open("def/MCPFAI_res.dat")
        report_def = file_def.read()
        report_res = file_res.read()
        info = self.get.runFundsReport(report_def, report_res, date.today())

        for data in info:
            fund = Fund()
            fund.Name = data[0][1]
            fund.ID = data[1][1]
            fund.DMRR = datetime.strptime(data[2][1], "%m/%d/%Y")
            fund.incept_date = datetime.strptime(data[3][1], "%m/%d/%Y")
            fund.IL = "MCP Fund or Index"
            fund.Class = "MCP Fund or Index"
            fund.DMRR = self.getActualDMRR(fund.ID, fund.DMRR)
            MCPFAI.append(fund)

        tmp_file = open(self.cache_path + "lists/MCPFAI_funds.cache", 'w')
        cPickle.dump(MCPFAI, tmp_file)
        tmp_file.close()

    def updateLists(self):
        self.updatePGList()
        self.updateMCPFAIList()
        self.updateFundList()

    def updateFundList(self):
        file_def = open("def/all_funds_def.dat")
        report_def = file_def.read()
        report_res = "${true}"
        t = date.today()

        info = self.get.runFundsReport(report_def, report_res, self.cur_date)

        funds = []
        active_funds = []
        fund_index = {}

        invested_funds = []
        focus_list_funds = []
        inv_and_focus_funds = []
        ex_focus_funds = []
        ex_invested_funds = []
        ex_inv_and_focus_funds = []

        for data in info:
            if data[2][1] != 'None':
                fund = Fund()
                fund.Name = data[0][1]
                fund.ID = data[1][1]
                fund.DMRR = datetime.strptime(data[2][1], "%m/%d/%Y")
                fund.DMRR = self.getActualDMRR(fund.ID, fund.DMRR)
                fund.IL = data[4][1]
                fund.StrengthRating = data[5][1]
                fund.Class = data[6][1]
                fund.prime_strat = data[7][1]
                fund.sub_strat1 = data[8][1]
                fund.sub_strat2 = data[9][1]
                fund.city = data[10][1]
                tmp_state = self.getState(data[12][1])
                if tmp_state == "None":
                    fund.state = data[11][1]
                else:
                    fund.state = tmp_state
                fund.postal_code = data[12][1]
                fund.incept_date = datetime.strptime(data[13][1], "%m/%d/%Y")
                fund.part_of_pmg = str.lower(str(data[14][1])).strip()
                if fund.part_of_pmg == "yes":
                    fund.part_of_pmg = True
                else:
                    fund.part_of_pmg = False
                funds.append(fund)

        for fund in funds:
            fund.ListName = fund.Name + "                                                                                                              (MRR:" + self.dateFormat(
                fund.DMRR, "LIST") + ")"
            fund.ListName += "(Int:" + fund.IL + ")"
            fund.ListName += "(Str:" + fund.StrengthRating + ")"
            fund.ListName += "(Cls:" + fund.Class + ")"
            fund.ListName += "(Strat:" + fund.prime_strat + ")"
            fund.ListName += "(Sub1:" + fund.sub_strat1 + ")"
            fund.ListName += "(Sub2:" + fund.sub_strat2 + ")"
            fund.ListName += "(City:" + fund.city + ")"
            fund.ListName += "(State:" + fund.state + ")"
            fund.ListName += "(Zip:" + fund.postal_code + ")"
            fund_index[fund.ID] = fund
            if self.equalOrAfter(
                    fund.DMRR,
                    datetime(self.cur_date.year - 1, self.cur_date.month,
                             self.cur_date.day)):
                active_funds.append(fund)
            if fund.IL == "Invested" and fund.part_of_pmg:
                inv_and_focus_funds.append(fund)
                invested_funds.append(fund)
                ex_focus_funds.append(fund)
            elif fund.IL == "Focus List" and fund.part_of_pmg:
                inv_and_focus_funds.append(fund)
                focus_list_funds.append(fund)
                ex_invested_funds.append
            else:
                ex_invested_funds.append(fund)
                ex_focus_funds.append(fund)
                ex_inv_and_focus_funds.append(fund)

        invested_funds = sorted(invested_funds, key=lambda Fund: Fund.Name)
        focus_list_funds = sorted(focus_list_funds, key=lambda Fund: Fund.Name)
        inv_and_focus_funds = sorted(inv_and_focus_funds,
                                     key=lambda Fund: Fund.Name)
        ex_focus_funds = sorted(ex_focus_funds, key=lambda Fund: Fund.Name)
        ex_invested_funds = sorted(ex_invested_funds,
                                   key=lambda Fund: Fund.Name)
        ex_inv_and_focus_funds = sorted(ex_inv_and_focus_funds,
                                        key=lambda Fund: Fund.Name)
        active_funds = sorted(active_funds, key=lambda Fund: Fund.Name)
        funds = sorted(funds, key=lambda Fund: Fund.Name)

        file = open(self.cache_path + "lists/" + "all_funds.cache", "w")
        cPickle.dump(funds, file)
        file.close()

        file = open(self.cache_path + "lists/" + "active_funds.cache", "w")
        cPickle.dump(active_funds, file)
        file.close()

        file = open(self.cache_path + "lists/invested_funds.cache", "w")
        cPickle.dump(invested_funds, file)
        file.close()

        file = open(self.cache_path + "lists/focus_list_funds.cache", "w")
        cPickle.dump(focus_list_funds, file)
        file.close()

        file = open(self.cache_path + "lists/inv_and_focus_funds.cache", "w")
        cPickle.dump(inv_and_focus_funds, file)
        file.close()

        file = open(self.cache_path + "lists/ex-focus.cache", "w")
        cPickle.dump(ex_focus_funds, file)
        file.close()

        file = open(self.cache_path + "lists/ex-invested.cache", "w")
        cPickle.dump(ex_invested_funds, file)
        file.close()

        file = open(self.cache_path + "lists/ex-invested_and_focus.cache", "w")
        cPickle.dump(ex_inv_and_focus_funds, file)
        file.close()

        file = open(self.cache_path + "lists/all_fund_index.cache", "w")
        cPickle.dump(fund_index, file)
        file.close()

    def getActualDMRR(self, id, dmrr):
        tmp_dmrr = dmrr

        if id in self.fund_return_dates.keys():
            dates = self.fund_return_dates[id]
        elif str(id) in self.fund_return_dates.keys():
            dates = self.fund_return_dates[str(id)]
        elif int(str(id)) in self.fund_return_dates.keys():
            dates = self.fund_return_dates[int(str(id))]
        elif unicode(str(id)) in self.fund_return_dates.keys():
            dates = self.fund_return_dates[unicode(str(id))]
        else:
            dates = []
        print "dates = ", dates
        if len(dates) > 0:
            dates = sorted(dates, reverse=True)
            if self.equalOrAfter(dates[0], dmrr):
                tmp_dmrr = dates[0]
        return tmp_dmrr

    def equalOrAfter(self, question, bench):
        if question.year > bench.year or question.year == bench.year and question.month >= bench.month:
            return True
        else:
            return False

    def prevMonthYear(self, offset, cur_date=datetime.now()):
        year = 0
        if cur_date.month == 12:
            year = cur_date.year + 1
        else:
            year = cur_date.year
        return year - offset

    def prevMonth(self, cur_date=datetime.now()):
        month = 0
        if cur_date.month == 1:
            month = 11
        else:
            month = cur_date.month - 2
        return month

    def getEndOfMonth(self, month, year):
        num_days = 0
        if month == 4 or month == 6 or month == 9 or month == 11:
            num_days = 30
        elif month == 2:
            if calendar.isleap(year):
                num_days = 29
            else:
                num_days = 28
        else:
            num_days = 31
        return num_days

    def dateFormat(self, cur_date, type="DEFAULT"):
        formatted_str = ""
        if type == "LIST":
            formatted_str = self.months[cur_date.month % 12] + "'" + str(
                cur_date.year)[2:]
        elif type == "DEFAULT":
            year = cur_date.year - 2000
            if year < 10:
                year = "0" + str(year)
            else:
                year = str(year)
            formatted_str = months[cur_date.month % 12] + "-" + year
        return formatted_str

    def getState(self, zip_code):
        prefix = str(zip_code)[0:3]
        try:
            prefix = int(prefix)
        except:
            prefix = -1
        if prefix >= 995:
            state = "Alaska"
        elif prefix >= 980:
            state = "Washington"
        elif prefix >= 970:
            state = "Oregon"
        elif prefix >= 967:
            state = "Hawaii"
        elif prefix >= 962:
            state = "None"
        elif prefix >= 900:
            state = "California"
        elif prefix >= 889:
            state = "Nevada"
        elif prefix == 885:
            state = "Texas"
        elif prefix >= 870:
            state = "New Mexico"
        elif prefix >= 850:
            state = "Arizona"
        elif prefix >= 840:
            state = "Utah"
        elif prefix >= 832:
            state = "Idaho"
        elif prefix >= 820:
            state = "Wyoming"
        elif prefix >= 800:
            state = "Colorado"
        elif prefix >= 750:
            state = "Texas"
        elif prefix >= 730:
            state = "Oklahoma"
        elif prefix >= 716:
            state = "Arkansas"
        elif prefix >= 700:
            state = "Lousiana"
        elif prefix >= 680:
            state = "Nebraska"
        elif prefix >= 660:
            state = "Kansas"
        elif prefix >= 630:
            state = "Missouri"
        elif prefix >= 600:
            state = "Illinois"
        elif prefix >= 590:
            state = "Montana"
        elif prefix >= 580:
            state = "North Dakota"
        elif prefix >= 570:
            state = "South Dakota"
        elif prefix >= 569:
            state = "Washington D.C."
        elif prefix >= 550:
            state = "Minnesota"
        elif prefix >= 530:
            state = "Wisconsin"
        elif prefix >= 500:
            state = "Iowa"
        elif prefix >= 480:
            state = "Michigan"
        elif prefix >= 460:
            state = "Indiana"
        elif prefix >= 430:
            state = "Ohio"
        elif prefix >= 400:
            state = "Kentucky"
        elif prefix >= 398:
            state = "Georgia"
        elif prefix >= 386:
            state = "Mississippi"
        elif prefix >= 370:
            state = "Tennessee"
        elif prefix >= 350:
            state = "Alabama"
        elif prefix >= 320:
            state = "Florida"
        elif prefix >= 300:
            state = "Georgia"
        elif prefix >= 290:
            state = "South Carolina"
        elif prefix >= 270:
            state = "North Carolina"
        elif prefix >= 247:
            state = "West Virginia"
        elif prefix >= 220:
            state = "Virginia"
        elif prefix >= 206:
            state = "Maryland"
        elif prefix >= 202:
            state = "Fed. Government"
        elif prefix >= 201:
            state = "Virginia"
        elif prefix >= 200:
            state = "Washington D.C."
        elif prefix >= 197:
            state = "Delaware"
        elif prefix >= 150:
            state = "Pennsylvania"
        elif prefix >= 100:
            state = "New York"
        elif prefix >= 90:
            state = "None"
        elif prefix >= 70:
            state = "New Jersey"
        elif prefix >= 60:
            state = "Connecticut"
        elif prefix >= 50:
            state = "Vermont"
        elif prefix >= 39:
            state = "Maine"
        elif prefix >= 30:
            state = "New Hampshire"
        elif prefix >= 28:
            state = "Rhode Island"
        elif prefix >= 10:
            state = "Massachusetts"
        elif prefix >= 6:
            state = "Puerto Rico"
        else:
            state = "None"
        return state
class StatsCacheUpdater:    
    def __init__(self,cache_path=("C:/Documents and Settings/" + os.getenv("username") + "/cache_root/Stats")):
        if cache_path[len(cache_path)-1] != "/" and cache_path[len(cache_path)-1] != "\\":
            cache_path += "/"        
        self.cache_path = cache_path
        t = date.today()
        self.cur_date = date(t.year,t.month,15)
        
        self.get = BackStopDataManager()
        self.months = ["Dec", "Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep","Oct", "Nov"]
        self.checkFolders()
        self.loadData()
    
    def syncCache(self,source_path):
        cmd_str = "xcopy \"" + source_path + "\" \"" + self.cache_path[0:len(self.cache_path)-1] + "\" /e /i /h"
        os.system(cmd_str)
    
    def checkFolders(self):
        cp = self.cache_path
        if not os.access(cp + "returns", os.F_OK):
            os.makedirs(cp + "returns")
        if not os.access(cp + "lists", os.F_OK):
            os.makedirs(cp + "lists")
        if not os.access(cp + "pg_lists", os.F_OK):
            os.makedirs(cp + "pg_lists")
    
    def loadData(self):
        self.returns_index = {}
        self.fund_return_dates = {}
        if os.access(self.cache_path + "returns/returns.cache",os.F_OK):
            ret_cache_file = open(self.cache_path + "returns/returns.cache")
            self.returns_index = cPickle.load(ret_cache_file)
            ret_cache_file.close()
            self.loadFundReturnDates()

    def loadFundReturnDates(self):
        for year in self.returns_index.keys():
            for month in self.returns_index[year].keys():
                for id in self.returns_index[year][month].keys():
                    if id not in self.fund_return_dates.keys():
                        self.fund_return_dates[id] = []
                    if "RMF" in self.returns_index[year][month][id].keys():
                        if date(year,month,1) not in self.fund_return_dates[id]:
                            self.fund_return_dates[id].append(date(year,month,1))
        
    def updateReturns(self,as_of_date=date.today()):
        if self.equalOrAfter(as_of_date,date.today()):
            as_of_date = date.today()
                
        if not os.access(self.cache_path + "lists/active_funds.cache",os.F_OK):
            self.updateLists()
            
        tmp_file = open(self.cache_path + "lists/active_funds.cache")
        active_funds = cPickle.load(tmp_file)
        tmp_file.close()
        
        benchmarks = [496539,496605]
        
        returns = {}
        
        for b_id in benchmarks:
            tmp_returns = self.get.getHedgeFundReturns(b_id, as_of_date, as_of_date)
            if tmp_returns != False:
                returns[b_id] = {}
                for month in tmp_returns:
                    if str(month[5][1]) != "not_existent":
                        returns[b_id]["RMF"] = month[0][1] + 1     
                        returns[b_id]["RF"] = month[0][1]
          
        for fund_var in range(0,len(active_funds)):
            if fund_var % 25 == 0:
                print 100.0 * float(fund_var)/float(len(active_funds)), "%"
            fund = active_funds[fund_var]
            tmp_returns = self.get.getHedgeFundReturns(fund.ID, as_of_date, as_of_date)
            if tmp_returns != False: 
                returns[fund.ID] = {}
                for month in tmp_returns:
                    if str(month[5][1]) != "not_existent":
                        returns[fund.ID]["RMF"] = month[0][1] + 1     
                        returns[fund.ID]["RF"] = month[0][1]

        if not as_of_date.year in self.returns_index.keys():
            self.returns_index[as_of_date.year] = {}
        
        self.returns_index[as_of_date.year][as_of_date.month] = returns
        
        self.loadFundReturnDates()
        
        ret_file = open(self.cache_path + "returns/returns.cache",'w')
        cPickle.dump(self.returns_index, ret_file)
        ret_file.close()
    
    def updatePGList(self):
        ids = self.get.getPeerGroupIds()
        id_name_map = dict()
        for id in ids:
            if "{Stats}" in id[1]:
                id_name_map[id[0]] = id[1]

        file = open(self.cache_path + "pg_lists/cache-peer_group_id_name_map.cache", "w")
        cPickle.dump(id_name_map,file)
        file.close()
        
        pg_member_ids = {}
        for pg_id in id_name_map.keys():
            info = self.get.getPeerGroupMemberIds(pg_id,date.today())
            id_list = []
            for i in info:
                id_list.append(i[0])
            pg_member_ids[pg_id] = id_list

        file = open(self.cache_path + "pg_lists/cache-peer_group_member_id.cache", "w")
        cPickle.dump(pg_member_ids,file)
        file.close()
    
    def updateMCPFAIList(self):
        MCPFAI = []
        
        file_def = open("def/MCPFAI_def.dat")
        file_res = open("def/MCPFAI_res.dat")
        report_def = file_def.read()
        report_res = file_res.read()
        info = self.get.runFundsReport(report_def, report_res, date.today())
        
        for data in info:
            fund = Fund()
            fund.Name = data[0][1]
            fund.ID = data[1][1]
            fund.DMRR = datetime.strptime(data[2][1],"%m/%d/%Y")
            fund.incept_date = datetime.strptime(data[3][1],"%m/%d/%Y")
            fund.IL = "MCP Fund or Index"
            fund.Class = "MCP Fund or Index"
            fund.DMRR = self.getActualDMRR(fund.ID, fund.DMRR)                                
            MCPFAI.append(fund)
        
        tmp_file = open(self.cache_path + "lists/MCPFAI_funds.cache",'w')
        cPickle.dump(MCPFAI, tmp_file)
        tmp_file.close()
        
    def updateLists(self):
        self.updatePGList()
        self.updateMCPFAIList()
        self.updateFundList()
    
    def updateFundList(self):        
        file_def = open("def/all_funds_def.dat")
        report_def = file_def.read()
        report_res = "${true}"
        t = date.today()

        info = self.get.runFundsReport(report_def, report_res, self.cur_date)
        
        funds = []
        active_funds = []
        fund_index = {}
        
        invested_funds = []
        focus_list_funds = []
        inv_and_focus_funds = []
        ex_focus_funds = []
        ex_invested_funds = []
        ex_inv_and_focus_funds = []
        
        for data in info:
            if data[2][1] != 'None':
                fund = Fund()
                fund.Name = data[0][1] 
                fund.ID = data[1][1]
                fund.DMRR = datetime.strptime(data[2][1],"%m/%d/%Y")
                fund.DMRR = self.getActualDMRR(fund.ID, fund.DMRR)   
                fund.IL = data[4][1]
                fund.StrengthRating = data[5][1]
                fund.Class = data[6][1]
                fund.prime_strat = data[7][1]
                fund.sub_strat1 = data[8][1]
                fund.sub_strat2 = data[9][1]
                fund.city = data[10][1]
                tmp_state = self.getState(data[12][1])
                if tmp_state == "None":
                    fund.state = data[11][1]
                else:
                    fund.state = tmp_state
                fund.postal_code = data[12][1]
                fund.incept_date = datetime.strptime(data[13][1],"%m/%d/%Y")
                fund.part_of_pmg = str.lower(str(data[14][1])).strip()
                if fund.part_of_pmg == "yes":
                    fund.part_of_pmg = True
                else:
                    fund.part_of_pmg = False
                funds.append(fund)
        
        for fund in funds:
            fund.ListName = fund.Name + "                                                                                                              (MRR:" + self.dateFormat(fund.DMRR,"LIST") + ")"
            fund.ListName += "(Int:" + fund.IL + ")"
            fund.ListName += "(Str:" + fund.StrengthRating + ")"
            fund.ListName += "(Cls:" + fund.Class + ")"
            fund.ListName += "(Strat:" + fund.prime_strat + ")"
            fund.ListName += "(Sub1:" + fund.sub_strat1 + ")"
            fund.ListName += "(Sub2:" + fund.sub_strat2 + ")"
            fund.ListName += "(City:" + fund.city +")"
            fund.ListName += "(State:" + fund.state + ")"
            fund.ListName += "(Zip:" + fund.postal_code + ")"
            fund_index[fund.ID] = fund
            if self.equalOrAfter(fund.DMRR,datetime(self.cur_date.year-1,self.cur_date.month,self.cur_date.day)):
                active_funds.append(fund)
            if fund.IL == "Invested" and fund.part_of_pmg:
                inv_and_focus_funds.append(fund)
                invested_funds.append(fund)
                ex_focus_funds.append(fund)
            elif fund.IL == "Focus List" and fund.part_of_pmg:
                inv_and_focus_funds.append(fund)
                focus_list_funds.append(fund)
                ex_invested_funds.append
            else:
                ex_invested_funds.append(fund)
                ex_focus_funds.append(fund)
                ex_inv_and_focus_funds.append(fund)
        
        invested_funds = sorted(invested_funds, key=lambda Fund:Fund.Name)
        focus_list_funds = sorted(focus_list_funds, key=lambda Fund:Fund.Name)
        inv_and_focus_funds = sorted(inv_and_focus_funds, key=lambda Fund:Fund.Name)
        ex_focus_funds = sorted(ex_focus_funds, key=lambda Fund:Fund.Name)
        ex_invested_funds = sorted(ex_invested_funds, key=lambda Fund:Fund.Name)
        ex_inv_and_focus_funds = sorted(ex_inv_and_focus_funds, key=lambda Fund:Fund.Name)
        active_funds = sorted(active_funds, key=lambda Fund:Fund.Name)
        funds = sorted(funds, key=lambda Fund:Fund.Name)
        
        file = open(self.cache_path + "lists/" + "all_funds.cache", "w")
        cPickle.dump(funds,file)
        file.close()
        
        file = open(self.cache_path + "lists/" + "active_funds.cache", "w")
        cPickle.dump(active_funds,file)
        file.close()
        
        file = open(self.cache_path + "lists/invested_funds.cache", "w")
        cPickle.dump(invested_funds,file)
        file.close()
        
        file = open(self.cache_path + "lists/focus_list_funds.cache", "w")
        cPickle.dump(focus_list_funds,file)
        file.close()
        
        file = open(self.cache_path + "lists/inv_and_focus_funds.cache", "w")
        cPickle.dump(inv_and_focus_funds,file)
        file.close()
        
        file = open(self.cache_path + "lists/ex-focus.cache", "w")
        cPickle.dump(ex_focus_funds,file)
        file.close()
        
        file = open(self.cache_path + "lists/ex-invested.cache", "w")
        cPickle.dump(ex_invested_funds,file)
        file.close()
        
        file = open(self.cache_path + "lists/ex-invested_and_focus.cache", "w")
        cPickle.dump(ex_inv_and_focus_funds,file)
        file.close()
        
        file = open(self.cache_path + "lists/all_fund_index.cache", "w")
        cPickle.dump(fund_index,file)
        file.close()
    
    def getActualDMRR(self,id,dmrr):
        """Return the most recent cached return date for fund *id*, or *dmrr*.

        Looks up self.fund_return_dates and, if the latest cached date is in
        the same month as *dmrr* or later, returns that date instead of the
        supplied one.  Falls back to *dmrr* when no dates are cached.
        """
        tmp_dmrr = dmrr
        
        # The cache may key funds by several representations of the same id
        # (native, str, int, unicode).  Try each coercion in turn; the order
        # is deliberate — do not reorder.
        if id in self.fund_return_dates.keys():
            dates = self.fund_return_dates[id]
        elif str(id) in self.fund_return_dates.keys():
            dates = self.fund_return_dates[str(id)]
        elif int(str(id)) in self.fund_return_dates.keys():
            dates = self.fund_return_dates[int(str(id))]
        elif unicode(str(id)) in self.fund_return_dates.keys():
            dates = self.fund_return_dates[unicode(str(id))]
        else:
            dates = []
        # NOTE(review): debug print left in — remove once cache lookups are trusted.
        print "dates = ", dates
        if len(dates) > 0:
            # Newest first; only override dmrr if the cached date is not older
            # (month granularity — see equalOrAfter).
            dates = sorted(dates,reverse=True)
            if self.equalOrAfter(dates[0],dmrr):
                tmp_dmrr = dates[0]
        return tmp_dmrr
        
    def equalOrAfter(self,question, bench):
        """Return True if *question* is in the same month as *bench* or later.

        Only (year, month) are compared; the day component is ignored.
        """
        # Tuple comparison is equivalent to the original chain:
        #   question.year > bench.year
        #   or (question.year == bench.year and question.month >= bench.month)
        return (question.year, question.month) >= (bench.year, bench.month)
    
    def prevMonthYear(self,offset,cur_date=None):
        """Return a year *offset* years back relative to *cur_date*.

        cur_date defaults to the current moment.  For December, the year is
        bumped by one before subtracting.
        """
        # BUG FIX: the original default `cur_date=datetime.now()` was evaluated
        # once at definition time, so long-running processes saw a stale date.
        # A None sentinel restores the intended "now at call time" behavior.
        if cur_date is None:
            cur_date = datetime.now()
        # NOTE(review): the +1 for December looks intentional — it appears to
        # pair with prevMonth()'s zero-based month arithmetic — confirm with callers.
        if cur_date.month == 12:
            year = cur_date.year + 1
        else:
            year = cur_date.year
        return year - offset
    
    def prevMonth(self,cur_date=None):
        """Return the previous month as a zero-based index (Jan=0 .. Dec=11).

        cur_date defaults to the current moment.  E.g. January -> 11
        (December), March -> 1 (February).
        """
        # BUG FIX: the original default `cur_date=datetime.now()` was evaluated
        # once at definition time, freezing the date for the process lifetime.
        if cur_date is None:
            cur_date = datetime.now()
        if cur_date.month == 1:
            return 11
        # month is 1-based; subtracting 2 yields the 0-based previous month.
        return cur_date.month - 2
    
    def getEndOfMonth(self,month,year):
        """Return the number of days in *month* (1-12) of *year*.

        February accounts for leap years; unrecognized month values fall
        through to 31, matching the original behavior.
        """
        if month in (4, 6, 9, 11):
            return 30
        if month == 2:
            # February length depends on the leap-year rule.
            return 29 if calendar.isleap(year) else 28
        return 31

    def dateFormat(self,cur_date,type="DEFAULT"):
        """Format *cur_date* as a short month/year label.

        type="LIST"    -> month name + "'" + 2-digit year (e.g. Jan'09)
        type="DEFAULT" -> month name + "-" + zero-padded 2-digit year
        Any other type returns "".
        """
        formatted_str = ""
        if type == "LIST":
            # NOTE(review): month % 12 maps December (12) to index 0 —
            # presumably self.months is laid out to match; confirm its definition.
            formatted_str = self.months[cur_date.month%12] + "'" + str(cur_date.year)[2:]
        elif type == "DEFAULT":
            year = cur_date.year - 2000
            if year < 10:
                year = "0" + str(year)
            else:
                year = str(year)
            # BUG FIX: original referenced the bare name `months`, raising
            # NameError at runtime; the LIST branch correctly uses self.months.
            formatted_str = self.months[cur_date.month%12] + "-" + year
        return formatted_str

    def getState(self,zip_code):
        """Map a US ZIP code to its state name via the 3-digit prefix.

        Returns "None" for unparseable input, unassigned ranges, or unknown
        prefixes.
        """
        try:
            prefix = int(str(zip_code)[0:3])
        except ValueError:
            # BUG FIX: was a bare `except:` that swallowed every exception;
            # int() on a string can only raise ValueError here.
            prefix = -1
        # 885 is an exact-match carve-out (Texas) inside the 870-888 range.
        if prefix == 885:
            return "Texas"
        # Descending lower bounds; the first one <= prefix wins.  Order
        # mirrors the original if/elif chain exactly.
        bounds = (
            (995, "Alaska"), (980, "Washington"), (970, "Oregon"),
            (967, "Hawaii"), (962, "None"), (900, "California"),
            (889, "Nevada"), (870, "New Mexico"), (850, "Arizona"),
            (840, "Utah"), (832, "Idaho"), (820, "Wyoming"),
            (800, "Colorado"), (750, "Texas"), (730, "Oklahoma"),
            (716, "Arkansas"), (700, "Louisiana"),  # BUG FIX: was "Lousiana"
            (680, "Nebraska"), (660, "Kansas"), (630, "Missouri"),
            (600, "Illinois"), (590, "Montana"), (580, "North Dakota"),
            (570, "South Dakota"), (569, "Washington D.C."),
            (550, "Minnesota"), (530, "Wisconsin"), (500, "Iowa"),
            (480, "Michigan"), (460, "Indiana"), (430, "Ohio"),
            (400, "Kentucky"), (398, "Georgia"), (386, "Mississippi"),
            (370, "Tennessee"), (350, "Alabama"), (320, "Florida"),
            (300, "Georgia"), (290, "South Carolina"), (270, "North Carolina"),
            (247, "West Virginia"), (220, "Virginia"), (206, "Maryland"),
            (202, "Fed. Government"), (201, "Virginia"), (200, "Washington D.C."),
            (197, "Delaware"), (150, "Pennsylvania"), (100, "New York"),
            (90, "None"), (70, "New Jersey"), (60, "Connecticut"),
            (50, "Vermont"), (39, "Maine"), (30, "New Hampshire"),
            (28, "Rhode Island"), (10, "Massachusetts"), (6, "Puerto Rico"),
        )
        for floor, state in bounds:
            if prefix >= floor:
                return state
        return "None"