Example #1
class queueManager:
    def __init__(self, inputmbrs):
        self.qu1 = kNNLRUCache(100)
        self.qu2 = LRUCache(100)
        self.mbrs = inputmbrs
        self.hit1 = 0
        self.miss1 = 0
        self.hit2 = 0
        self.miss2 = 0

    def getSpatial(self, x, y, r):
        for mbr in self.mbrs:
            if mbr.dist2p(x, y) < r:
                # print("checking" + str(mbr))
                result1 = self.qu1.get(str(mbr))
                if result1 == -1:
                    self.miss1 += 1
                    self.qu1.set(str(mbr), 0)
                else:
                    self.hit1 += 1
                result2 = self.qu2.get(str(mbr))
                if result2 == -1:
                    self.miss2 += 1
                    self.qu2.set(str(mbr), 0)
                else:
                    self.hit2 += 1
                if result1 != result2:
                    print("different!")
Example #2
    def test_operation2(self):

        cache = LRUCache(1)
        cache.set(2, 1, 11)
        self.assertEqual(cache.get(2), 1)

        cache.set(3, 2, 11)
        self.assertEqual(cache.get(2), -1)
        self.assertEqual(cache.get(3), 2)
Example #3
    def test_operation1(self):

        cache = LRUCache(2)

        cache.set(2, 1, 11)
        cache.set(1, 1, 11)
        self.assertEqual(cache.get(2), 1)
        cache.set(4, 1, 11)
        self.assertEqual(cache.get(1), -1)
        self.assertEqual(cache.get(2), 1)
Example #4
def main():
    global tablet
    global normal_set
    global TP
    global FP
    global FN
    global label_y
    global predict_y

    file_dir = '/cbs_trace1/sample_6/'

    #load Cache
    MemoryCache = LRUCache(memory_size)
    SSDCache = LRUCache(cache_size)

    #load Classifier
    clf = Classifier()

    current_time = 1538323200

    last = 0
    for i in range(1, 31):
        ssd_hit_num = 0
        memory_hit_num = 0
        total_read = 0

        total_write = 0
        memory_write_num = 0
        memory_write_hit = 0
        ssd_write_num = 0
        hdd_write_num = 0

        predict_time = 0
        predict_cnt = 0

        data_x = []
        data_y = []

        if clf.load("%d.clf" % (last)):
            predict_flag = 1
        else:
            predict_flag = 0

        print('----------------------------------')
        cudate = time.strftime("%Y-%m-%d", time.localtime(current_time))
        print(cudate)
        current_time += 86400
        print('----------------------------------')
        file_name = file_dir + str(i)
        f = open(file_name, "r")
        for line in f.readlines():
            io_record = line.split(",")

            #update tablet
            key = io_record[4][:-2] + ',' + str(int(io_record[1]) >> 11)
            if key in tablet:
                a = tablet[key]
            else:
                a = [0] * feature_num
                tablet[key] = a
            if int(io_record[3]) == 1:
                a[info_table['WriteSize']] += int(io_record[2])
                a[info_table['WriteCnt']] += 1

                if a[info_table['LastWrite']] != 0:
                    a[info_table['WriteInterval']] += abs(
                        int(io_record[0]) - a[info_table['LastWrite']])
                a[info_table['LastWrite']] = int(io_record[0])

                if int(io_record[2]) > 64:
                    a[info_table['BigWrite']] += 1
                elif int(io_record[2]) <= 8:
                    a[info_table['SmallWrite']] += 1

            else:
                a[info_table['ReadSize']] += int(io_record[2])
                a[info_table['ReadCnt']] += 1

                if a[info_table['LastRead']] != 0:
                    a[info_table['ReadInterval']] += abs(
                        int(io_record[0]) - a[info_table['LastRead']])
                a[info_table['LastRead']] = int(io_record[0])

                if int(io_record[2]) > 64:
                    a[info_table['BigRead']] += 1
                elif int(io_record[2]) <= 8:
                    a[info_table['SmallRead']] += 1

            #update Cache
            block = io_record[4][:-2] + ',' + str(int(io_record[1]) >> 6)
            if int(io_record[3]) == 1:
                #write
                total_write += 1
                if MemoryCache.get(block) != None:
                    memory_write_num += 1
                    memory_write_hit += 1
                else:
                    #reach condition to flush, flush first!
                    flush_data = MemoryCache.flush()
                    if flush_data != None:
                        feature_tmp = []
                        flush_key = []
                        for key in flush_data:
                            flush_key.append(key[0])
                            feature_tmp.append(process_eviction(key, 0)[0])

                        if predict_flag == 1:
                            oldtime = datetime.now()
                            rst = clf.predict(feature_tmp)
                            newtime = datetime.now()
                            predict_cnt += len(flush_key)
                            predict_time += int(
                                (newtime - oldtime).microseconds)
                            for key, ft in zip(flush_key, rst):
                                tablet_key = "%s,%d" % (key.split(
                                    ",")[0], int(key.split(",")[1]) >> 5)
                                if key in SSDCache.cache:
                                    ssd_write_num += 1

                                else:
                                    if ft == 1:
                                        #only write
                                        hdd_write_num += 1
                                        tablet[tablet_key][
                                            info_table['Predict']] = 1
                                    else:
                                        tablet[tablet_key][
                                            info_table['Predict']] = 2
                                        ssd_write_num += 1
                                        ssd_evict = SSDCache.set(key, 1)
                                        if ssd_evict != None:
                                            hdd_write_num += 1
                                            label = process_eviction(
                                                ssd_evict, 1)
                                            data_x.append(label[0])
                                            data_y.append(label[1])

                        else:
                            for key in flush_key:
                                ssd_write_num += 1
                                ssd_evict = SSDCache.set(key, 1)
                                if ssd_evict != None:
                                    hdd_write_num += 1
                                    label = process_eviction(ssd_evict, 1)
                                    data_x.append(label[0])
                                    data_y.append(label[1])

                    MemoryCache.set(block, 1)

            else:
                #read
                total_read += 1

                if MemoryCache.get(block) != None:
                    #hit
                    memory_hit_num += 1
                else:
                    if SSDCache.get(block) != None:
                        #hit
                        ssd_hit_num += 1
                        a[info_table['Hit']] += 1

                    else:
                        ssd_evict = SSDCache.set(block, 1)
                        ssd_write_num += 1
                        if ssd_evict != None:
                            hdd_write_num += 1
                            label = process_eviction(ssd_evict, 1)
                            data_x.append(label[0])
                            data_y.append(label[1])

        print("SSD footprint : %.2f %%" %
              (len(SSDCache.cache) * 100 / cache_size))
        print("Memory footprint : %.2f %%" %
              (len(MemoryCache.cache) * 100 / memory_size))
        print("memory_write : %.2f %%" %
              (memory_write_num * 100 / total_write))
        print("memory_write_hit : %.2f %%" %
              (memory_write_hit * 100 / total_write))
        print("ssd_write : %.2f %%" % ((ssd_write_num) * 100 / total_write))
        print("hdd_write : %.2f %%" % (hdd_write_num * 100 / total_write))
        print("Memory Hit Ratio : %.2f %%" %
              (memory_hit_num * 100 / total_read))
        print("SSD Hit Ratio : %.2f %%" %
              ((memory_hit_num + ssd_hit_num) * 100 / total_read))

        oldtime = datetime.now()
        #training model for next day
        last = i
        #save Classifier
        if len(data_x) > 0:
            clf = Classifier()
            clf.load("%d.clf" % (last))
            clf.fit(data_x, data_y)
            clf.save("%d.clf" % (last))
        newtime = datetime.now()
        train_time = int((newtime - oldtime).microseconds)

        #result
        file = 'result/hwp_result'
        f = open(file, 'a+')

        #time,total_write,ssd_write,hdd_write,ssd_write_ratio,hdd_write_ratio,memory_hit,ssd_hit,accuracy,recall,predict_time,auc,train_time,recall2,positive_ratio
        if TP + FP == 0:
            f.write(
                "%d\t%d\t%d\t%d\t%.2f\t%.2f\t%.2f\t%.2f\t%.2f\t%.2f\t%.2f\t%.4f\t%d\t%.2f\t%.2f\n"
                % (current_time - 86400, total_write, ssd_write_num,
                   hdd_write_num, ssd_write_num * 100 / total_write,
                   hdd_write_num * 100 / total_write,
                   memory_hit_num * 100 / total_read,
                   (memory_hit_num + ssd_hit_num) * 100 / total_read, 0, 0, 0,
                   0, train_time, 0, 0))
        else:
            auc = metrics.roc_auc_score(label_y, predict_y)
            total = len(label_y)
            right = 0
            pos = 0
            t1 = 0
            for l, p in zip(label_y, predict_y):
                if l == p:
                    right += 1
                if l == 1:
                    pos += 1
                    if p == 1:
                        t1 += 1

            f.write(
                "%d\t%d\t%d\t%d\t%.2f\t%.2f\t%.2f\t%.2f\t%.2f\t%.2f\t%.2f\t%.4f\t%d\t%.2f\t%.2f\n"
                % (current_time - 86400, total_write, ssd_write_num,
                   hdd_write_num, ssd_write_num * 100 / total_write,
                   hdd_write_num * 100 / total_write,
                   memory_hit_num * 100 / total_read,
                   (memory_hit_num + ssd_hit_num) * 100 / total_read,
                   right * 100 / total, TP * 100 /
                   (TP + FN), predict_time / predict_cnt, auc, train_time,
                   t1 * 100 / pos, pos * 100 / total))
        f.close()
Example #5
import time
from LRUCache import LRUCache

"""
TEST #1: INSERT ELEMENTS UNTIL CACHE GETS FULL
"""
print("Test #1: INSERT ELEMENTS UNTIL CACHE GETS FULL.")
capacity = 5
cache_test_1 = LRUCache(capacity)
for key in range(capacity + 1):  # inserts capacity + 1 items, so key 0 is already evicted here
    value = key
    cache_test_1.set(key, value)

print(cache_test_1)
print("-----------------------------------------------")

"""
TEST #2: INSERT AN ELEMENT WHEN CACHE IS FULL
"""
print("")
print("Test #2: INSERT AN ELEMENT WHEN CACHE IS FULL.")
print("Element with key #1 should be deleted")
extra_element_key = capacity + 1  # extra element key equals 6
extra_element_value = capacity + 1
cache_test_1.set(extra_element_key, extra_element_value)
print(cache_test_1)
print("-----------------------------------------------")

"""
TEST #3: TRYING TO GET AN ELEMENT WHICH IS NOT INSIDE CACHE
"""
Example #6
class UCAC4:
    """ Uses UCAC4 catalog to search ucac4 numbers from coords and vice versa. Ucac4 path can be passed or read from $ucac4_path """

    def __init__(self, ucac_path=None):
        if ucac_path is None:
            try:
                ucac_path = Path(os.environ["ucac4_path"])
                print("found environ with ucac4 path", ucac_path)
            except KeyError:
                logging.debug(
                    "No ucac path passed, and no environment var, using default"
                )
                ucac_path = Path("./support/ucac4/UCAC4/")
        logging.info(f"Ucac path is {ucac_path}")
        # star id given by munipack
        self.ucac_path = ucac_path
        self.zones_cache = LRUCache(capacity=10)
        self.bucket_cache = LRUCache(capacity=100000)
        self.zone_bucket_cache = LRUCache(capacity=10000)
        self.index_cache = None
        self.ra_range = 360 * 3600 * 100
        # read index file
        with open(
            str(Path(ucac_path, "u4i", "u4index.unf")), mode="rb"
        ) as file:  # b is important -> binary
            self.index_cache = file.read()
        self.zone_starformat = "=iiHHBBBbbBBBHHhhbbIHHHBBBBBBHHHHHbbbbbBIBBIHI"
        self.zone_star_length = struct.calcsize(self.zone_starformat)
        self.unpack_zone_fileformat = struct.Struct(self.zone_starformat).unpack
        (
            self.result_n0_running_star_number,
            self.result_nn_stars_in_bin,
        ) = UCAC4._get_n0_and_nn(self.index_cache)

    def get_zone_filecontent(self, zone: int):
        """ gets the content of a zone file, either from disk or from cache"""
        result = self.zones_cache.get(zone)
        if result != -1:
            return result
        with open(
            str(Path(self.ucac_path, f"u4b/z{zone:03}")), mode="rb"
        ) as file:  # b is important -> binary
            result = file.read()
            self.zones_cache.set(zone, result)
            return result

    def get_ucactuple_from_id(self, ucac_id) -> UcacTuple:
        """ Given a UCAC ID, return a tuple of (StarTuple, zone, run_nr) """
        zone, run_nr = UCAC4.ucac_id_to_zone_and_run_nr(ucac_id)
        logging.debug(f"UCAC4 id {zone}, {run_nr}")
        return self.get_ucactuples_for_zone_and_runnrs(zone, [run_nr])[0]

    def get_ra_dec_from_id(self, ucac4_id) -> Tuple[float, float]:
        startuple, _, _ = self.get_ucactuple_from_id(ucac4_id)
        ra, dec = UCAC4.get_real_ra_dec(startuple.ra, startuple.spd)
        return ra, dec

    def index_bin_to_run_nrs(self, zone: int, index_bin: int):
        # index = (zone - 1) * 1440 + index_bin
        index = (index_bin - 1) * 900 + zone - 1
        star_run_nr = self.result_n0_running_star_number[index]
        star_count_in_bucket = self.result_nn_stars_in_bin[index]
        run_nrs = list(range(star_run_nr, star_run_nr + star_count_in_bucket))
        logging.debug(
            f"index_bin_to_run_nrs: zone is {zone}, index_bin = {index_bin}, index is {index}. "
            f"star_run_nr is {star_run_nr}, count =  {star_count_in_bucket}, run nrs: {run_nrs}"
        )
        return run_nrs

    def get_zones_and_index_bins(self, ra, dec, tolerance_deg) -> Tuple[List[int], List[int]]:
        logging.debug(f"ra: {ra}, dec: {dec}, tolerance: {tolerance_deg}")
        zone = self.get_zone_for_dec(dec - tolerance_deg / 2)
        end_zone = self.get_zone_for_dec(dec + tolerance_deg / 2)
        # check for zone overlap
        zones = list(range(zone, end_zone + 1))
        logging.debug(f"get_zones_and_index_bins: Zone range is {zones}")
        ra_low = self.ra_bin_index(ra - tolerance_deg / 2)  # ra_start in C code
        ra_high = self.ra_bin_index(ra + tolerance_deg / 2)
        index_bins = list(range(ra_low, ra_high + 1))
        logging.debug(f"get_zones_and_index_bins: {ra_low}, {ra_high}, {index_bins}")
        return zones, index_bins

    def get_ucactuples_for_zone_and_runnrs(
        self, zone: int, run_nr_list: List[int]
    ) -> UcacTupleList:
        """ Given a zone and one or more run_nr's, return List of (StarTuple, zone, run_nr) """
        stars = []
        filecontent = self.get_zone_filecontent(zone)
        for run_nr in run_nr_list:
            key = (zone, run_nr)
            # logging.debug(f"Getting star from zone {zone}, run_nr {run_nr}")
            star = self.bucket_cache.get(key)
            if star == -1:
                result = self.unpack_zone_fileformat(
                    filecontent[
                        self.zone_star_length
                        * (run_nr - 1) : self.zone_star_length
                        * run_nr
                    ]
                )
                star = UCAC4.make_startuple(result)
                self.bucket_cache.set(key, star)
            stars.append((star, zone, run_nr))
            # logging.debug(f"read ucac4 star: {star}")
        return stars

    @staticmethod
    def _get_n0_and_nn(index_cache):
        index_format = "1296000I"
        index_length = struct.calcsize(index_format)
        logging.debug(f"index length  is {index_length}")
        unpack_starformat = struct.Struct(index_format).unpack
        result_n0 = unpack_starformat(index_cache[0:index_length])
        result_nn = unpack_starformat(index_cache[index_length:])
        return result_n0, result_nn

    def get_region_minimal_star_tuples(
        self, ra: float, dec: float, radius=0.5
    ) -> List[MinimalStarTuple]:
        """ For a given ra/dec and radius, return all ucac4 stars as ('id, ra, dec, mag') """
        zones, buckets = self.get_zones_and_index_bins(ra, dec, radius)
        result = []
        for zone in zones:
            for bucket in buckets:
                cache_key = (zone, bucket)
                ucactuples: UcacTupleList = self.zone_bucket_cache.get(cache_key)
                # cache miss
                if ucactuples == -1:
                    ucactuples: UcacTupleList = self.get_ucactuples_for_zone_and_runnrs(
                        zone, self.index_bin_to_run_nrs(zone, bucket)
                    )
                    self.zone_bucket_cache.set(cache_key, ucactuples)
                if len(ucactuples) == 0:
                    logging.debug(f"zone/bucket: {zone}/{bucket}, no stars")
                    if bucket + 1 not in buckets:
                        buckets.append(bucket + 1)
                        logging.debug(f"Appending bucket {bucket+1}")
                for ucactuple in ucactuples:
                    result.append(
                        MinimalStarTuple(
                            UCAC4.zone_and_run_nr_to_name(ucactuple[1], ucactuple[2]),
                            *UCAC4.get_real_ra_dec(ucactuple[0].ra, ucactuple[0].spd),
                            ucactuple[0].apass_mag_V / 1000,
                        )
                    )
        return result

    def get_sd_from_ra_dec(
        self, ra: float, dec: float, tolerance_deg=0.02
    ) -> StarDescription:
        return self.get_star_description_from_tuple(
            self.get_ucactuple_from_ra_dec(ra, dec, tolerance_deg)
        )


    def get_ucactuple_from_ra_dec(
        self, ra: float, dec: float, tolerance_deg=0.02
    ) -> UcacTuple:
        logging.debug(
            f"get_ucac4_ucactuple_from_ra_dec with ra:{ra}, dec:{dec}, tolerance:{tolerance_deg}"
        )
        target_np = np.array((ra, dec))
        zones, buckets = self.get_zones_and_index_bins(ra, dec, tolerance_deg)
        smallest_dist = 1000
        best = None
        for zone in zones:
            for bucket in buckets:
                cache_key = (zone, bucket)
                ucactuples: UcacTupleList = self.zone_bucket_cache.get(cache_key)
                # cache miss
                if ucactuples == -1:
                    ucactuples: UcacTupleList = self.get_ucactuples_for_zone_and_runnrs(
                        zone, self.index_bin_to_run_nrs(zone, bucket)
                    )
                    self.zone_bucket_cache.set(cache_key, ucactuples)
                ################ DEBUG
                if len(ucactuples) > 0:
                    radecs = [
                        self.get_real_ra_dec(x[0].ra, x[0].spd) for x in ucactuples
                    ]
                    ras = [x[0] for x in radecs]
                    decs = [x[1] for x in radecs]
                    logging.debug(
                        f"zone/bucket: {zone}/{bucket}, Searching between {min(ras)}, {max(ras)}, {min(decs)}, {max(decs)}"
                    )
                else:
                    logging.debug(f"zone/bucket: {zone}/{bucket}, no stars")
                    if bucket + 1 not in buckets:
                        buckets.append(bucket + 1)
                        logging.debug(f"Appending bucket {bucket+1}")
                ################ DEBUG
                for ucactuple in ucactuples:
                    dist = np.linalg.norm(
                        np.array(
                            (UCAC4.get_real_ra_dec(ucactuple[0].ra, ucactuple[0].spd))
                            - target_np
                        )
                    )
                    # logging.debug(f"magj: {sd[0].mag_j}")
                    if dist < smallest_dist:
                        smallest_dist = dist
                        best = ucactuple
        if best is None:
            logging.warning(
                f"Did not find a UCAC4 match for {ra}, {dec}, {tolerance_deg}. Buckets: {buckets}, "
                f"zones: {zones},smallest dist: {smallest_dist}"
            )
            return best
        logging.debug(f"Best distance is: {smallest_dist}, {best}")
        return best

    @staticmethod
    def get_zone_for_dec(dec: float, zone_height: float = 0.2) -> int:
        dec_0 = decimal.Decimal(dec) + decimal.Decimal(90.0)
        result = min(900, max(1, int(dec_0 / decimal.Decimal(zone_height)) + 1))
        logging.debug(
            f"get_zone_for_dec: dec {dec}, height:{zone_height}, dec0 {dec_0}, result {result}"
        )
        return result

    @staticmethod
    def ra_bin_index(ra: float):
        """index for bins along RA (1 to 1440)"""
        index = math.ceil(decimal.Decimal(ra) * decimal.Decimal(4))  # 1440/360
        logging.debug(
            f"ra_bin_index: ra {ra}, index {index}, rawindex: {decimal.Decimal(ra) * decimal.Decimal(4)}"
        )
        return max(1, index)

    @staticmethod
    def ucac_id_to_zone_and_run_nr(ucac_id: str):
        full_id = ucac_id[6:]
        zone = int(full_id[:3])
        run_nr = int(full_id[4:].lstrip("0"))
        return zone, run_nr

    @staticmethod
    def zone_and_run_nr_to_name(zone: int, run_nr: int):
        return f"UCAC4 {zone:03}-{run_nr:06}"

    def get_star_description_from_id(self, ucac4_id) -> StarDescription:
        return UCAC4.get_star_description_from_tuple(
            self.get_ucactuple_from_id(ucac4_id)
        )

    @staticmethod
    def get_star_descriptions_from_tuples(
        ucactuples: UcacTupleList,
    ) -> List[StarDescription]:
        return [
            UCAC4.get_star_description_from_tuple(ucactuple) for ucactuple in ucactuples
        ]

    @staticmethod
    def get_star_description_from_tuple(ucactuple: UcacTuple) -> StarDescription:
        startuple, zone, run_nr = ucactuple
        ra, dec = UCAC4.get_real_ra_dec(startuple.ra, startuple.spd)
        coords = SkyCoord(ra, dec, unit="deg")
        vmag = startuple.apass_mag_V / 1000
        vmag_err = abs(startuple.apass_mag_sigma_V / 100)
        aavso_id = UCAC4.zone_and_run_nr_to_name(zone, run_nr)
        sd = StarDescription(
            coords=coords,
            vmag=vmag,
            vmag_err=vmag_err,
            aavso_id=aavso_id,
        )
        do_calibration.add_catalog_data_to_sd(
            sd,
            vmag,
            vmag_err,
            aavso_id,
            "UCAC4",
            coords,
            extradata=ucactuple[0]._asdict() # get the StarTuple from the UcacTuple
        )
        return sd


    @staticmethod
    def get_real_ra_dec(ra, spd) -> Tuple[float, float]:
        # return ra / 1000 / 3600, (spd - 324000000) / 1000 / 3600
        divisor = decimal.Decimal(3600000)
        return (
            float(decimal.Decimal(ra) / divisor),
            float(decimal.Decimal(spd - 324000000) / divisor),
        )

    @staticmethod
    def make_startuple(result: List) -> StarTuple:
        star = StarTuple._make(result)
        star = star._replace(ra_sigma=star.ra_sigma + 128)
        star = star._replace(dec_sigma=star.dec_sigma + 128)
        star = star._replace(pm_ra_sigma=star.pm_ra_sigma + 128)
        star = star._replace(pm_dec_sigma=star.pm_dec_sigma + 128)
        return star

    def add_sd_metadatas(self, stars: List[StarDescription], overwrite=False):
        with tqdm.tqdm(total=len(stars), desc="Adding UCAC4", unit="stars") as pbar:
            for star in stars:
                if not star.has_metadata("UCAC4") or overwrite:
                    sd = self.get_sd_from_ra_dec(
                        star.coords.ra.deg, star.coords.dec.deg
                    )
                    self._add_catalog_data_to_sd(star, sd, overwrite)
                pbar.update(1)

    def add_sd_metadata_from_id(
        self, star: StarDescription, ucac4_id: str, overwrite=False
    ):
        sd = self.get_star_description_from_id(ucac4_id)
        self._add_catalog_data_to_sd(star, sd, overwrite)

    @staticmethod
    def _add_catalog_data_to_sd(
        sd: StarDescription, ucac4_sd: StarDescription, overwrite
    ):
        """ Add UCAC4 catalog data to a stardescription if there is none yet, or if overwrite is True """
        if ucac4_sd is not None and (not sd.has_metadata("UCAC4") or overwrite):
            do_calibration.add_catalog_data_to_sd(
                sd,
                ucac4_sd.vmag,
                ucac4_sd.vmag_err,
                ucac4_sd.aavso_id,
                "UCAC4",
                ucac4_sd.coords,
                extradata=ucac4_sd.get_metadata("UCAC4").extradata,
            )
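
A brief usage sketch of the class above. The catalog path and the star identifier are illustrative placeholders, the calls require the UCAC4 data files to be present on disk, and the methods are the ones defined in this example.

ucac = UCAC4(ucac_path="/data/UCAC4")                  # hypothetical catalog location
ra, dec = ucac.get_ra_dec_from_id("UCAC4 001-000001")  # hypothetical star id
nearby = ucac.get_region_minimal_star_tuples(ra, dec, radius=0.5)
sd = ucac.get_sd_from_ra_dec(ra, dec, tolerance_deg=0.02)
print(sd, len(nearby))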
Example #7
def main():
    file_dir = '/cbs_trace1/sample_6/'

    MemoryCache = LRUCache(memory_size)
    SSDCache = LRUCache(cache_size)

    current_time = 1538323200
    for i in range(1, 31):

        ssd_hit_num = 0
        memory_hit_num = 0
        total_read = 0

        total_write = 0
        memory_write_num = 0
        ssd_write_num = 0
        hdd_write_num = 0

        print('----------------------------------')
        cudate = time.strftime("%Y-%m-%d", time.localtime(current_time))
        print(cudate)
        current_time += 86400
        print('----------------------------------')
        file_name = file_dir + str(i)
        f = open(file_name, "r")
        print(file_name)
        for line in f.readlines():
            io_record = line.split(",")

            #update tablet
            key = io_record[4][:-2] + ',' + str(int(io_record[1]) >> 11)
            if key in tablet:
                a = tablet[key]
                a[0] = a[0] + 1
            else:
                a = [1] * feature_num
                tablet[key] = a

            block = io_record[4][:-2] + ',' + str(int(io_record[1]) >> 6)
            if int(io_record[3]) == 1:
                #write
                total_write += 1
                ssd_write_num += 1
                ssd_evict = SSDCache.set(block, 1)
                if ssd_evict != None:
                    hdd_write_num += 1

            else:
                #read
                total_read += 1
                if SSDCache.get(block) != None:
                    # hit
                    ssd_hit_num += 1
                else:
                    evict = SSDCache.set(block, 1)
                    ssd_write_num += 1
                    if evict != None:
                        hdd_write_num += 1

        print("SSD footprint : %.2f %%" %
              (len(SSDCache.cache) * 100 / cache_size))
        print("memory_write : %.2f %%" %
              (memory_write_num * 100 / total_write))
        # each write absorbed by memory still incurs one SSD log write
        print("ssd_write : %.2f %%" %
              ((ssd_write_num + memory_write_num) * 100 / total_write))
        print("hdd_write : %.2f %%" % (hdd_write_num * 100 / total_write))
        print("Memory Hit Ratio : %.2f %%" %
              (memory_hit_num * 100 / total_read))
        print("SSD Hit Ratio : %.2f %%" %
              ((memory_hit_num + ssd_hit_num) * 100 / total_read))

        #result
        file = 'result/current'
        f = open(file, 'a+')
        #time,total_write,ssd_write,hdd_write,ssd_write_ratio,hdd_write_ratio,memory_hit,ssd_hit
        f.write(
            "%d\t%d\t%d\t%d\t%.2f\t%.2f\t%.2f\t%.2f\n" %
            (current_time - 86400, total_write, ssd_write_num, hdd_write_num,
             (ssd_write_num) * 100 / total_write, hdd_write_num * 100 /
             total_write, memory_hit_num * 100 / total_read,
             (memory_hit_num + ssd_hit_num) * 100 / total_read))
        f.close()
Example #8
class MyTestCase(unittest.TestCase):
    def setUp(self):
        self.cache = LRUCache(4)

    def test_set_get(self):
        self.cache.set('Jesse', 'James')
        self.assertEqual(self.cache.get('Jesse'), 'James')

    def test_delete(self):
        self.cache.set('Jesse', 'James')
        self.cache.get('Jesse')
        self.cache.delete('Walter')

    def test_out_of_memory(self):
        self.cache.set('Jesse', 'James')
        self.cache.set('Jim', 'Richard')
        self.cache.set('Smith', 'Alex')
        self.cache.set('Walter', 'White')

        self.cache.set(
            'Jesse',
            'John')  # ('Jesse', 'John') will be moved to the end of the list

        self.cache.set('Williams',
                       'Jack')  # ('Jim', 'Richard') will be deleted

        self.assertEqual(self.cache.get('Jim'), None)
        self.assertEqual(self.cache.get('Smith'), 'Alex')

        self.cache.set('Jim', 'Richard')  # ('Walter', 'White') will be deleted

        self.assertEqual(self.cache.get('Walter'), None)
Example #9
from LRUCache import LRUCache

if __name__ == '__main__':
    cache = LRUCache(100)
    cache.set('Jesse', 'Pinkman')
    cache.set('Walter', 'White')
    cache.set('Jesse', 'James')
    cache.get('Jesse')  # returns 'James'
    cache.delete('Walter')
    cache.get('Walter')  # returns ''
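
The examples above exercise LRUCache but never show the class itself, and they clearly target different implementations: get returns -1 on a miss in Examples #1-#3 and #6, None in Examples #4, #7 and #8, and '' after a delete in Example #9, while some variants accept extra set arguments or expose flush(). A minimal sketch of the common set/get/delete interface, assuming the None-on-miss convention of Example #8:

from collections import OrderedDict


class LRUCache:
    """Minimal LRU cache: set/get/delete with least-recently-used eviction."""

    def __init__(self, capacity):
        self.capacity = capacity
        self.cache = OrderedDict()   # keys ordered from least to most recently used

    def get(self, key):
        if key not in self.cache:
            return None              # miss sentinel; other examples use -1 or ''
        self.cache.move_to_end(key)  # mark as most recently used
        return self.cache[key]

    def set(self, key, value):
        if key in self.cache:
            self.cache.move_to_end(key)
        self.cache[key] = value
        if len(self.cache) > self.capacity:
            # evict and return the least-recently-used entry, as Examples #4/#7 expect
            return self.cache.popitem(last=False)
        return None

    def delete(self, key):
        self.cache.pop(key, None)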