Example #1
    def __init__(self, config=cfg, cache=True):
        if not cache or not os.path.isfile(config.data_cache):
            # No usable cache on disk: rebuild the splits and cache them.
            self.train, self.val = self.train_val_split(
                utils.load_csv(config.train_csv), 0.9)
            self.test = utils.load_csv(config.test_csv, shuffle=False)
            utils.save_cache([self.train, self.val, self.test],
                             config.data_cache)
        else:
            self.train, self.val, self.test = utils.load_cache(config.data_cache)
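
The `utils.save_cache` / `utils.load_cache` helpers are not shown in this example. A minimal pickle-based sketch that matches the call sites above; the names and behavior are assumptions, not the project's actual code:

import os
import pickle

def save_cache(objects, path):
    """Hypothetical helper: pickle the train/val/test splits to `path`."""
    os.makedirs(os.path.dirname(path) or ".", exist_ok=True)
    with open(path, "wb") as f:
        pickle.dump(objects, f)

def load_cache(path):
    """Hypothetical helper: load the pickled splits back from `path`."""
    with open(path, "rb") as f:
        return pickle.load(f)
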
Example #2
    def download_ureports(self, master_bt):
        for server_name, server_url in self.url.items():
            # Download the report matching master_bt from each source server.
            result = self.get_ureport_by_hash(master_hash=master_bt,
                                              source=server_url)

            parsed_json = self.parse_hash_from_json(result)  # TODO REMAKE ??

            self.slave_bt[server_name] = parsed_json

            if config.CACHE:
                save_cache(server_name + ".json", parsed_json)
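
`save_cache` here receives a file name and the parsed report. A plausible JSON-on-disk sketch; the cache directory and serialization format are assumptions:

import json
import os

CACHE_DIR = "cache"  # hypothetical location

def save_cache(filename, data):
    """Hypothetical helper: write one server's parsed report as JSON."""
    os.makedirs(CACHE_DIR, exist_ok=True)
    with open(os.path.join(CACHE_DIR, filename), "w") as f:
        json.dump(data, f)
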
Example #3
    def download_ureports(self, master_bt):
        for server_name, server_url in self.url.items():
            # Download the report matching master_bt from each source server.
            result = self.get_ureport_by_hash(master_hash=master_bt,
                                              source=server_url)

            parsed_json = self.parse_hash_from_json(result)  # TODO REMAKE ??

            self.slave_bt[server_name] = parsed_json

            if config.CACHE:
                save_cache(server_name + ".json", parsed_json)
Example #4
    def read(self, path):
        cache = utils.load_cache("facilities", self.config)

        if cache is None:
            self.progress = tqdm(desc="Loading Facilities ...")
            utils.make_xml_parser(self, utils.open_gzip(path))

            cache = self.process()
            utils.save_cache("facilities", cache, self.config)
        else:
            print("Loaded faciltiies from cache.")

        return cache
Example #5
    def read(self, path, facility_id_to_index):
        cache = None

        if self.config["use_population_cache"]:
            cache = utils.load_cache("population", self.config)

        if cache is None:
            self.progress = tqdm(desc="Loading Population ...")
            utils.make_xml_parser(self, utils.open_gzip(path))

            cache = self.process(facility_id_to_index)
            utils.save_cache("population", cache, self.config)
        else:
            print("Loaded population from cache.")

        return cache
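
Examples #4 and #5 share a keyed cache API: `utils.load_cache(name, config)` returns the cached object or None on a miss, and `utils.save_cache(name, data, config)` stores it. A pickle-based sketch; the `config["cache_path"]` key and file naming are assumptions:

import os
import pickle

def load_cache(name, config):
    """Hypothetical helper: return the cached object for `name`, or None."""
    path = os.path.join(config["cache_path"], name + ".p")
    if not os.path.isfile(path):
        return None
    with open(path, "rb") as f:
        return pickle.load(f)

def save_cache(name, data, config):
    """Hypothetical helper: pickle `data` under the configured cache dir."""
    os.makedirs(config["cache_path"], exist_ok=True)
    with open(os.path.join(config["cache_path"], name + ".p"), "wb") as f:
        pickle.dump(data, f)
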
Example #6
def main():
    last_check_cache = get_current_seconds()
    # Check whether there is cached data on disk to restore at startup.
    server_cache = reload_cache()
    print(f"Cache: {server_cache}")

    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    sock.bind(('localhost', 53))
    print('Server started on port 53')

    while True:
        # Main server loop: handle requests and evict stale cache entries.
        try:
            run_server(sock, server_cache)
            last_check_cache = clear_cache(last_check_cache, server_cache)
        except KeyboardInterrupt:
            # The server is shutting down; save the cache for the next restart.
            print('Server stopped, saving cache')
            save_cache(server_cache)
            exit(1)
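
`reload_cache` and `save_cache` persist the DNS cache across restarts. A minimal sketch assuming a single pickle file; the file name is hypothetical:

import os
import pickle

CACHE_FILE = "dns_cache.pickle"  # hypothetical path

def reload_cache():
    """Restore the cache dict from disk, or start with an empty one."""
    if os.path.isfile(CACHE_FILE):
        with open(CACHE_FILE, "rb") as f:
            return pickle.load(f)
    return {}

def save_cache(server_cache):
    """Persist the cache dict so the next start can restore it."""
    with open(CACHE_FILE, "wb") as f:
        pickle.dump(server_cache, f)
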
Example #7
def get_movies(cached=False):
    if cached:
        print("Returning cached data")
        return load_cache(MOVIES_CACHE)

    options = Options()
    options.add_argument('--disable-gpu')
    options.add_argument('--no-sandbox')
    options.add_argument('--headless')
    chrome_bin_path = os.environ.get('GOOGLE_CHROME_BIN', None)
    chromedriver_path = os.environ.get('CHROMEDRIVER_PATH', None)
    if not chrome_bin_path or not chromedriver_path:
        print(
            'Chrome problem. Check that Chrome and chromedriver are installed '
            'and the environment variables are set.'
        )
        return []

    options.binary_location = chrome_bin_path
    # options.set_headless(headless=True)

    url = create_multikino_url()
    print(f"Getting {url} ...")
    browser = webdriver.Chrome(executable_path=chromedriver_path,
                               options=options)
    browser.get(url)
    html = browser.page_source
    save_to_file("multikino.html", html)
    print(f"browser: {browser}")
    browser.quit()

    movies = []

    print("Parsing...")
    soup = BeautifulSoup(html, "html.parser")
    for movie in soup.find_all(class_='filmlist__info'):
        title = movie.select(".filmlist__title > span")[0].get_text()
        rating, votes = None, None  # defaults if the rating markup is missing
        try:
            rating = movie.find(attrs={
                "rv-show": "film.rank_value"
            }).select("span")[0].get_text()
            votes = movie.find(attrs={
                "rv-show": "film.rank_votes"
            }).select("span")[0].get_text()
        except AttributeError:
            print(f"No rating for {title}")
        except Exception as e:
            print(f"Something really bad happened: {e}")

        description = movie.select(".filmlist__synopsis > p")[0].get_text()
        genres = list(
            map(lambda item: item.get_text(),
                movie.find_all("a", class_="film-details__item")))
        genres = ', '.join(genres) or "-"

        if any(keyword in title for keyword in FILTER_KEYWORDS):
            continue

        movie = Movie(title=title,
                      votes=votes,
                      description=description,
                      genres=genres)
        movie.rating.mul = rating
        movies.append(movie)

    hash_movies = {movie.title: movie for movie in movies}

    print('Total movies found (+7 days from now): {}'.format(len(movies)))

    loop = asyncio.new_event_loop()
    print("Filmweb api call...")
    loop.run_until_complete(get_all_filmweb_api_data(hash_movies))
    print("IMDB api call...")
    loop.run_until_complete(get_all_imdb_api_data(hash_movies))

    movies = sort_movies_descending(movies)
    print("Saving cache...")
    save_cache(movies, MOVIES_CACHE)
    print("OK")
    return movies
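
The `load_cache` / `save_cache` pair here just round-trips the scraped movie list. A pickle sketch consistent with the argument order at the call sites; this is an assumption, not the project's code:

import pickle

def save_cache(data, path):
    """Hypothetical helper: pickle the movie list for later cached=True runs."""
    with open(path, "wb") as f:
        pickle.dump(data, f)

def load_cache(path):
    """Hypothetical helper: return the previously pickled movie list."""
    with open(path, "rb") as f:
        return pickle.load(f)
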
Example #8
def _main(flag_draw, flag_preview, flag_asis, flag_si, address):
    """
    ADDRESS - freeform address to get forecast for
    """

    address = ' '.join(address)  # the CLI passes the address as separate words
    load_cache(get_location)
    location = get_location(address)
    if not location:
        return 1
    save_cache(get_location)

    if flag_asis:
        nice_address = address
    else:
        nice_address = get_nice_address(location)

    weather = get_weather(location, flag_si)
    if weather is None or "currently" not in weather:
        return 1

    image_black = Image.new('1', (EPD_HEIGHT, EPD_WIDTH), 255)  # 298*126
    image_red = Image.new('1', image_black.size, 255)

    # estimate size of and draw forecast address
    address_text, address_size = get_text_fit(image_black, nice_address,
                                              image_black.size[0] - 4,
                                              CONFIG["font_address"],
                                              CONFIG["font_address_size_min"],
                                              CONFIG["font_address_size"])
    draw_centered_text(image_red, address_text, 0, CONFIG["font_address"],
                       address_size)
    max_address_height = get_font_height(image_black, CONFIG["font_address"],
                                         CONFIG["font_address_size"])

    # estimate sizes of today/tomorrow forecasts
    (d0w, d0h) = draw_icon_temp(image_black,
                                weather["daily"]["data"][0], (0, 0),
                                CONFIG["font_forecast_size"],
                                daily=True,
                                draw_it=False,
                                si_units=flag_si)
    (d1w, d1h) = draw_icon_temp(image_black,
                                weather["daily"]["data"][1], (0, 0),
                                CONFIG["font_forecast_size"],
                                daily=True,
                                draw_it=False,
                                si_units=flag_si)

    # position forecasts nicely
    d_gap = (image_black.size[0] - d0w - d1w) / 3
    d0x = d_gap
    d0y = image_black.size[1] - d0h - 2
    d1x = d_gap + d0w + d_gap
    d1y = d0y

    # actually draw forecasts
    draw_icon_temp(image_black,
                   weather["daily"]["data"][0], (d0x, d0y),
                   CONFIG["font_forecast_size"],
                   daily=True,
                   si_units=flag_si)
    draw_icon_temp(image_black,
                   weather["daily"]["data"][1], (d1x, d1y),
                   CONFIG["font_forecast_size"],
                   daily=True,
                   si_units=flag_si)

    (cw, ch) = draw_icon_temp(image_black,
                              weather["currently"], (0, 0),
                              CONFIG["font_main_size"],
                              daily=False,
                              draw_it=False,
                              si_units=flag_si)
    draw_icon_temp(
        image_black,
        weather["currently"],
        ((image_black.size[0] - cw) / 2, int(max_address_height * 0.9)),
        CONFIG["font_main_size"],
        daily=False,
        si_units=flag_si)

    if flag_preview:
        imgcat(gen_preview(image_black, image_red))
    if flag_draw:
        draw_epaper_horizontal(image_black, image_red)

    return 0
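
`load_cache(get_location)` and `save_cache(get_location)` take the function itself, which suggests a per-function memoization cache persisted around the geocoding call. A sketch assuming the memo dict lives in a `cache` attribute on the function; the attribute and file naming are assumptions:

import pickle

def load_cache(func):
    """Restore func's memo dict from disk, if a cache file exists."""
    try:
        with open(func.__name__ + ".cache", "rb") as f:
            func.cache = pickle.load(f)
    except FileNotFoundError:
        func.cache = {}  # first run: start with an empty memo

def save_cache(func):
    """Persist func's memo dict so repeated addresses skip the lookup."""
    with open(func.__name__ + ".cache", "wb") as f:
        pickle.dump(func.cache, f)
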
Example #9
def build_detector(detector_model_dir,
                   detector_model_names,
                   save_model_name,
                   save_model_dir,
                   model_path,
                   MODEL,
                   det_model,
                   data,
                   data_format,
                   is_det_joint,
                   model_idx,
                   gpu_count=1):
    det_dict = {}
    det_set = {}
    det_idx_set = {}
    dropout_rate_set = {}
    det_gpu_idx = {}

    for val in detector_model_names:
        if val == '':
            continue

        (cur_det_name, cur_p, cur_det_type,
         cur_dropout_rate, cur_model_id) = val.split('/')
        cur_model_id = int(cur_model_id)
        cur_det_path = os.path.join(detector_model_dir, cur_det_name)
        cur_detector = {
            "p": cur_p,
            "type": cur_det_type,
            "dropout_rate": cur_dropout_rate
        }
        det_dict[cur_det_name] = cur_detector

        if isinstance(det_model, list):
            cur_det_model = det_model[cur_model_id]
            cur_model_path = os.path.join(save_model_dir,
                                          save_model_name[cur_model_id])
            cur_det_idx = model_idx[cur_model_id]
        else:
            cur_det_model = det_model
            cur_model_path = model_path
            cur_det_idx = model_idx
        default_det_idx = cur_det_idx

        with tf.device('/gpu:' + str(cur_model_id % gpu_count)):
            # build detector
            print("# build detector: ", cur_det_name)
            print("type:", cur_det_type)
            print("p:", cur_p)
            print("drop_rate:", cur_dropout_rate)

            if cur_det_type == 'AED':
                cur_detector = AEDetector(cur_det_path, p=int(cur_p))
                cur_det_idx = load_model_idx(cur_det_path)
            elif cur_det_type == "DBD":
                id_reformer = IdReformer()
                print("# build reformer", cur_det_name)
                cur_reformer_t = SimpleReformer(cur_det_path)
                classifier = Classifier(cur_model_path,
                                        MODEL,
                                        data_format=data_format,
                                        model=cur_det_model)
                cur_detector = DBDetector(reconstructor=id_reformer,
                                          prober=cur_reformer_t,
                                          classifier=classifier,
                                          T=int(cur_p))
                cur_det_idx = load_model_idx(cur_det_path)

        if cur_det_idx is None:
            cur_det_idx = default_det_idx

        det_idx_set[cur_det_name] = cur_det_idx['validate']

        dropout_rate_set[cur_det_name] = float(cur_dropout_rate)
        det_set[cur_det_name] = cur_detector
        det_gpu_idx[cur_det_name] = cur_model_id % gpu_count

    # compute thrs
    thrs_set = {}
    det_info = {
        "model": save_model_name,
        "model_dir": save_model_dir,
        "det": det_dict,
        "det_dir": detector_model_dir,
        "joint_thrs": is_det_joint
    }

    cache_path = os.path.join(detector_model_dir, "cache")

    if is_det_joint:
        marks_set = []
        num = 0
        cache = load_cache(det_info, cache_path)
        if cache is None:
            cache_data = {}
            for cur_det_name, cur_det in det_set.items():
                validation_data = data.train_data_orig[
                    det_idx_set[cur_det_name]]
                num = int(
                    len(validation_data) * dropout_rate_set[cur_det_name])
                marks = cur_det.mark(validation_data, data_format=data_format)
                marks_set.append(marks)

                marks = np.sort(marks)
                cache_data[cur_det_name] = marks[-num]
                print("compute thrs for model #", cur_det_name, "#:",
                      marks[-num])

            marks_set = np.transpose(marks_set)
            marks_max = np.max(marks_set, axis=1)
            marks_max = np.sort(marks_max)
            max_thrs = marks_max[-num]

            cache_data['thrs'] = max_thrs
            if len(det_set) > 0:
                hash_id = save_cache(det_info, cache_data, cache_path)
                print("save cache:", hash_id)
        else:
            print("hit cache:", cache['hash_id'])
            cache_data = cache['data']
            for cur_det_name, cur_det in det_set.items():
                print("compute thrs for model #", cur_det_name, "#:",
                      cache_data[cur_det_name])
            max_thrs = cache_data['thrs']

        for cur_det_name, cur_det in det_set.items():
            thrs_set[cur_det_name] = max_thrs

        print("use joint thrs:", max_thrs)
    else:
        cache = load_cache(det_info, cache_path)
        if cache is None:
            cache_data = {}
            for cur_det_name, cur_det in det_set.items():
                validation_data = data.train_data_orig[
                    det_idx_set[cur_det_name]]
                num = int(
                    len(validation_data) * dropout_rate_set[cur_det_name])
                marks = cur_det.mark(validation_data, data_format=data_format)
                marks = np.sort(marks)

                thrs_set[cur_det_name] = marks[-num]
                cache_data[cur_det_name] = marks[-num]
                print("compute thrs for model #", cur_det_name, "#:",
                      marks[-num])

            if len(det_set) > 0:
                hash_id = save_cache(det_info, cache_data, cache_path)
                print("save cache:", hash_id)
        else:
            print("hit cache:", cache['hash_id'])
            cache_data = cache['data']
            for cur_det_name, cur_det in det_set.items():
                thrs_set[cur_det_name] = cache_data[cur_det_name]
                print("compute thrs for model #", cur_det_name, "#:",
                      cache_data[cur_det_name])

    return det_set, thrs_set, det_gpu_idx
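
Here `load_cache(det_info, cache_path)` returns a dict with `hash_id` and `data` keys (or None), and `save_cache(det_info, cache_data, cache_path)` returns the id it stored under. A sketch that keys the cache on a hash of the `det_info` dict; the hashing scheme and file layout are assumptions:

import hashlib
import json
import os
import pickle

def _hash_info(info):
    """Derive a stable id from the detector configuration dict."""
    blob = json.dumps(info, sort_keys=True, default=str).encode()
    return hashlib.md5(blob).hexdigest()

def load_cache(info, cache_path):
    """Return {'hash_id': ..., 'data': ...} for a cached config, else None."""
    hash_id = _hash_info(info)
    path = os.path.join(cache_path, hash_id + ".p")
    if not os.path.isfile(path):
        return None
    with open(path, "rb") as f:
        return {"hash_id": hash_id, "data": pickle.load(f)}

def save_cache(info, data, cache_path):
    """Store `data` under the config hash and return the hash id."""
    hash_id = _hash_info(info)
    os.makedirs(cache_path, exist_ok=True)
    with open(os.path.join(cache_path, hash_id + ".p"), "wb") as f:
        pickle.dump(data, f)
    return hash_id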