Example no. 1
0
    def _merge_tiles(self):
        """Stitch the downloaded tiles into one image and save it.

        Tiles are read from ``self.tile_dir`` (named ``<x>_<y><ext>``),
        laid out column by column onto a single canvas, and the merged
        image is written to ``self.output_dir``.

        Returns:
            str: path of the saved merged image.
        """
        dx, dy = self._get_delta()
        self.log('Merging tiles...')
        filename = '%s%s' % (self.file_name_prefix, self.tile_format)
        tiles = []
        imx = 0  # horizontal (x) paste offset of the current column, px
        imy = 0  # vertical (y) paste offset within the current column, px
        for x in range(dx):
            imy = 0
            col_width = 0  # widest tile seen in this column
            for y in reversed(range(dy)):
                tile_file = os.path.join(self.tile_dir,
                                         "%s_%s%s" % (x, y, self.tile_format))
                try:
                    tile = Image.open(tile_file)
                    tiles.append((tile, (imx, imy)))
                    # BUGFIX: advance the vertical offset by the tile
                    # *height* and the column width by the tile *width*;
                    # the original swapped them, which only happened to
                    # work for square tiles.
                    imy += tile.height
                    if tile.width > col_width:
                        col_width = tile.width
                except Exception as er:
                    # Best-effort: a missing/corrupt tile is skipped and
                    # leaves a black area on the canvas.
                    logger.warning(er)
            imx += col_width
        path = os.path.join(self.output_dir, filename)

        self.real_width = imx
        self.real_height = imy

        out = Image.new('RGB', (self.real_width, self.real_height))
        for t in tiles:
            out.paste(t[0], t[1])
        out.save(path)
        return path
Example no. 2
0
def make_request_with_proxy(url):
    """Fetch *url* through rotating proxies.

    Each proxy from the proxy file gets up to three attempts; a proxy
    that fails all of them is dropped, and the pruned list is persisted
    back to disk before returning.

    Returns:
        The raw response body on the first success, or ``None`` when
        every proxy has been exhausted.
    """
    proxies = proxy_handling.load_proxies_from_file()
    if not proxies:
        proxy_handling.update_proxies()
        proxies = proxy_handling.load_proxies_from_file()
    # Request headers are invariant — build them once, outside the loops.
    headers = {
        'user-agent':
        'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/64.0.3282.186 Safari/537.36',
        # BUGFIX: scheme was misspelled 'htpps'.
        'referer': 'https://www.google.com/'
    }
    removed = False
    tries = 3  # number of tries for each proxy
    # BUGFIX: iterate over a copy — removing a failed proxy from the list
    # being iterated silently skipped the proxy that followed it.
    for proxy in list(proxies):
        for attempt in range(1, tries + 1):
            try:
                proxy_handler = urllib2.ProxyHandler({
                    'http': proxy,
                    'https': proxy
                })
                opener = urllib2.build_opener(proxy_handler)
                # NOTE: installs the opener process-wide, as before.
                urllib2.install_opener(opener)
                request = urllib2.Request(url, headers=headers)
                f = urllib2.urlopen(request)
                return f.read()
            except Exception as er:
                logger.warning(er)
            if attempt == tries:
                proxies.remove(proxy)
                removed = True
    if removed:
        proxy_handling.dump_proxies_to_file(proxies)
Example no. 3
0
def make_request(url, with_proxy=False):
    """Download *url* and return the raw response body.

    Returns ``False`` for a falsy URL.  When *with_proxy* is set and a
    usable proxy list is available, the request is delegated to
    ``make_request_with_proxy``.  Any download failure is logged and
    reported as a ``TimeoutException``.
    """
    if not url:
        return False
    url = url.encode('utf-8')
    logger.debug(url)
    if with_proxy:
        proxies = proxy_handling.load_proxies()
        if proxies and len(proxies) and proxies[0] != 'None':
            return make_request_with_proxy(url)
    try:
        response = urllib2.urlopen(url)
        return response.read()
    except Exception as er:
        logger.warning(er)
        raise TimeoutException()
Example no. 4
0
    def get_image_url(self, x, y):
        """Build the ArcGIS export URL for the image covering tile (x, y).

        Makes an ``f=json`` metadata request first; on success the
        returned extent is stored in ``self._image_extent_list`` and the
        same URL with ``f=image`` is returned.

        Returns:
            str: the image URL, or ``False`` when no extent/code is
            available or the metadata request fails.
        """
        output_format = self.output_format
        if self.clear_code and self.extent:
            if self.total == 1:
                # A single tile is requested at no less than 500 px a side.
                dx, dy = (max(side, 500) for side in self.tile_size)
            else:
                dx, dy = self.tile_size
            code = self.clear_code

            # BUGFIX: materialize as a list. ``map`` yields a one-shot
            # iterator under Python 3, so the second use below (the
            # layerDefs comprehension) would see it already exhausted
            # and build an empty dict.
            layers = [str(i) for i in range(20)]
            params = {
                "dpi": 96,
                "transparent": "false",
                "format": "png",
                "layers": "show:%s" % ",".join(layers),
                "bbox": ",".join(map(str, self._get_bbox_by_xy(x, y))),
                "bboxSR": 102100,
                "imageSR": 102100,
                "size": "%s,%s" % (dx, dy),
                "layerDefs": {layer: "ID = '%s'" % code for layer in layers},
                "f": "json"
            }
            if output_format:
                params["format"] = output_format
            # Merge our params into whatever query the base URL carries.
            url_parts = list(urlparse.urlparse(self.url))
            query = dict(urlparse.parse_qsl(url_parts[4]))
            query.update(params)
            url_parts[4] = urlencode(query)
            meta_url = urlparse.urlunparse(url_parts)
            if meta_url:
                try:
                    response = self.make_request(meta_url)
                    data = json.loads(response)
                    if data.get("href"):
                        self._image_extent_list.append(data.get("extent"))
                        return meta_url.replace("f=json", "f=image")
                    else:
                        logger.warning("Can't get image meta data from: %s" %
                                       meta_url)
                except Exception as er:
                    logger.warning(er)
        elif not self.extent:
            logger.warning("Can't get image without extent")
        return False
Example no. 5
0
 def task_wrapper(*args):
     """Run *target* with *args* and enqueue its result on *result*.

     A ``TimeoutException`` raised by the task is logged and swallowed
     so the worker keeps going.
     """
     try:
         outcome = target(*args)
         result.put(outcome)
     except TimeoutException:
         logger.warning("Waiting time exceeded")