def _secondaryText(self):
    """Build the secondary status line for the batch tile download UI.

    The text depends on the current phase: finished batch, running
    size check, running batch download, or idle size-check state.

    :returns: status text (may contain simple HTML markup), "" when a
              finished size check found nothing, or None in transitional
              running states
    """
    # finished batch with nothing pending -> report what was downloaded
    # NOTE(review): both downloadPool and _downloadPool are referenced
    # here - presumably one is a property alias of the other; confirm
    if self.downloadPool.ended \
        and self.requestCount == 0 \
        and self._downloadPool.downloadedDataSize:
        downloadedPretty = utils.bytes2PrettyUnitString(
            self.downloadPool.downloadedDataSize)
        return "%s has been downloaded" % downloadedPretty
    if self.running:
        if self.checkSizeRunning:
            estimatePretty = utils.bytes2PrettyUnitString(
                self._checkPool.downloadSize)
            return "batch size is ~%s, %d tiles found locally" % (
                estimatePretty, self._checkPool.foundLocally)
        if self.batchDownloadRunning:
            progress = utils.bytes2PrettyUnitString(
                self._downloadPool.downloadedDataSize)
            if self._checkPool.downloadSize:
                # a size check ran -> append its estimate as the total
                estimatePretty = utils.bytes2PrettyUnitString(
                    self._checkPool.downloadSize)
                progress = "%s/~%s" % (progress, estimatePretty)
            return "%s downloaded" % progress
        return None  # running, but in a transitional state
    # idle - report the result of the last size check, if any
    if not self._checkPool.ended:
        return "Total size of tiles is unknown (<i>click to check</i>)."
    if self._checkPool.downloadSize:
        totalPretty = utils.bytes2PrettyUnitString(
            self._checkPool.downloadSize)
        return "Total size is ~%s (<i>click to recheck</i>)." % totalPretty
    return ""
def _download_source_data(self):
    """Download all Monav source data packages listed in the CSV file.

    Reads package URLs from the repository CSV file, optionally sorts
    them by size, then downloads each one wrapped in a MonavPackage
    and puts it on the source queue. Failures for individual URLs are
    logged and skipped so one bad URL does not abort the whole run.
    """
    csv_file_path = self.manager.monav_csv_path
    # get a CSV line count to get approximate repository update progress
    urlCount = utils.countCSVLines(csv_file_path)
    if not urlCount:  # just to be sure
        urlCount = 0
    source_log.info('source data downloader starting')
    # read all URLs to a list; the context manager guarantees the file
    # is closed even if CSV parsing raises (the original leaked it then)
    with open(csv_file_path, "r") as f:
        reader = csv.reader(f)
        urls = [row[0] for row in reader if len(row) > 0]
    if self.manager.args.monav_dont_sort_urls:
        source_log.info('URL sorting disabled')
        # size is unknown when sorting is skipped -> use None so the
        # per-pack log below reports "unknown size" instead of "0 B"
        sorted_urls = map(lambda x: (None, x), urls)
    else:
        # sort the URLs by size
        source_log.info('sorting URLs by size in ascending order')
        sorted_urls, totalSize = utils.sortUrlsBySize(urls)
        source_log.info('total download size: %s',
                        utils.bytes2PrettyUnitString(totalSize))
    # download all the URLs
    pack_id = 0
    for size, url in sorted_urls:
        try:
            metadata = {
                'packId': pack_id,
                'tempPath': self.temp_path,
                'helperPath': self.folder_name,
                'preprocessorPath': self._preprocessor_path,
                'url': url,
                'urlType': self._get_source_url_type()
            }
            pack = MonavPackage(metadata)
            pack_id += 1
            if size is None:
                size_string = "unknown size"
            else:
                size_string = utils.bytes2PrettyUnitString(size)
            source_log.info('downloading %d/%d: %s (%s)',
                            pack_id, urlCount, pack.name, size_string)
            pack.load()
            self.source_queue.put(pack)
        except Exception:
            # log with traceback and continue with the next URL
            source_log.exception('loading url failed: %s', url)
    source_log.info('all downloads finished')
def _generate_monav_packages(self, source_folder, file_size_list):
    """Generate and enqueue a MonavPackage for every PBF file.

    Failures for individual files are logged and skipped so one bad
    file does not abort the whole run.

    :param source_folder: path prefix of the source PBF files
    :param file_size_list: list of (file path, file size) tuples
    """
    # loop-invariant total, hoisted out of the loop
    # (the original recomputed len() on every iteration)
    file_count = len(file_size_list)
    # generate a package for every PBF file
    pack_id = 0
    for f, f_size in file_size_list:
        try:
            metadata = {
                'packId': pack_id,
                'tempPath': self.temp_path,
                'helperPath': self.folder_name,
                'preprocessorPath': self._preprocessor_path,
                'filePath': f,
                'filePathPrefix': source_folder
            }
            pack = MonavPackage(metadata)
            pack_id += 1
            size_string = utils.bytes2PrettyUnitString(f_size)
            source_log.info('loading %d/%d: %s (%s)',
                            pack_id, file_count, pack.name, size_string)
            pack.load()
            self.source_queue.put(pack)
        except Exception:
            # log with traceback and continue with the next file
            source_log.exception('loading PBF file failed: %s', f)
    source_log.info('all source files loaded')
def getFreeSpaceString(self):
    """Return a string describing the space available on the filesystem
    where the tile-folder is located

    :returns: string describing free space in tile folder,
              or "unknown" when free space can't be determined
    :rtype: str
    """
    path = self.modrana.paths.getMapFolderPath()
    # freeSpaceInPath() can return None (free space not determinable),
    # which bytes2PrettyUnitString() would choke on
    freeSpace = utils.freeSpaceInPath(path)
    if freeSpace is None:
        return "unknown"
    return utils.bytes2PrettyUnitString(freeSpace)
def listAvailableTracklogs(self):
    """Scan the tracklog folder and rebuild the tracklog lists.

    Populates self.categoryList, self.tracklogPathList and
    self.tracklogList from the GPX files found in the per-category
    subfolders of the tracklog folder. Does nothing if the tracklog
    folder does not exist.
    """
    print("** making a list of available tracklogs")
    tf = self.modrana.paths.getTracklogsFolderPath()
    # does the tracklog folder exist ?
    if tf is None or not os.path.exists(tf):
        return  # no tracklog folder, nothing to list
    # get the available directories,
    # each directory represents a category
    # NOTE: materialized to a list - on Python 3 a filter object is a
    # single-use iterator that would be exhausted by the loop below,
    # leaving self.categoryList empty
    currentFolders = [x for x in os.listdir(tf)
                      if os.path.isdir(os.path.join(tf, x)) and not x.startswith('.')]
    # add files from all available folders
    availableFiles = []
    pathList = []
    for folder in currentFolders:
        #TODO: support other tracklogs
        folderFiles = glob.glob(os.path.join(tf, folder, '*.gpx'))
        folderFiles.extend(glob.glob(os.path.join(tf, folder, '*.GPX')))
        # remove possible folders; materialized to a list so it survives
        # both the loop below and the pathList.extend() afterwards
        folderFiles = [x for x in folderFiles if os.path.isfile(x)]
        for file in folderFiles:
            path = file
            filename = os.path.split(file)[1]
            lastModifiedEpochSecs = os.path.getmtime(path)
            lastModified = strftime("%d.%m.%Y %H:%M:%S",
                                    gmtime(lastModifiedEpochSecs))
            size = utils.bytes2PrettyUnitString(os.path.getsize(path))
            extension = os.path.splitext(path)[1]
            cat = folder
            item = {'path': path,
                    'filename': filename,
                    'lastModified': lastModified,
                    'size': size,
                    'type': extension[1:],
                    'cat': cat}
            availableFiles.append(item)
        pathList.extend(folderFiles)
    self.categoryList = currentFolders
    print("* using this tracklog folder:")
    print("* %s" % self.modrana.paths.getTracklogsFolderPath())
    print("* there are %d tracklogs available" % len(availableFiles))
    print("**")
    self.tracklogPathList = pathList
    self.tracklogList = availableFiles
def listAvailableTracklogs(self):
    """Scan the tracklog folder and rebuild the tracklog lists.

    Populates self.categoryList, self.tracklogPathList and
    self.tracklogList from the GPX files found in the per-category
    subfolders of the tracklog folder. Does nothing if the tracklog
    folder does not exist.
    """
    self.log.info("** making a list of available tracklogs")
    tf = self.modrana.paths.getTracklogsFolderPath()
    # does the tracklog folder exist ?
    if tf is None or not os.path.exists(tf):
        return  # no tracklog folder, nothing to list
    # get the available directories,
    # each directory represents a category
    # NOTE: materialized to a list - on Python 3 a filter object is a
    # single-use iterator that would be exhausted by the loop below,
    # leaving self.categoryList empty
    currentFolders = [x for x in os.listdir(tf)
                      if os.path.isdir(os.path.join(tf, x)) and not x.startswith('.')]
    # add files from all available folders
    availableFiles = []
    pathList = []
    for folder in currentFolders:
        #TODO: support other tracklogs
        folderFiles = glob.glob(os.path.join(tf, folder, '*.gpx'))
        folderFiles.extend(glob.glob(os.path.join(tf, folder, '*.GPX')))
        # remove possible folders; materialized to a list so it survives
        # both the loop below and the pathList.extend() afterwards
        folderFiles = [x for x in folderFiles if os.path.isfile(x)]
        for file in folderFiles:
            path = file
            filename = os.path.split(file)[1]
            lastModifiedEpochSecs = os.path.getmtime(path)
            lastModified = strftime("%d.%m.%Y %H:%M:%S",
                                    gmtime(lastModifiedEpochSecs))
            size = utils.bytes2PrettyUnitString(os.path.getsize(path))
            extension = os.path.splitext(path)[1]
            cat = folder
            item = {'path': path,
                    'filename': filename,
                    'lastModified': lastModified,
                    'size': size,
                    'type': extension[1:],
                    'cat': cat}
            availableFiles.append(item)
        pathList.extend(folderFiles)
    self.categoryList = currentFolders
    self.log.info("* using this tracklog folder:")
    # lazy logger arguments instead of eager %-formatting
    self.log.info("* %s", self.modrana.paths.getTracklogsFolderPath())
    self.log.info("* there are %d tracklogs available", len(availableFiles))
    self.log.info("**")
    self.tracklogPathList = pathList
    self.tracklogList = availableFiles
def getFreeSpaceString(self):
    """Return a string describing the space available on the filesystem
    where the tile-folder is located

    :returns: string describing free space in tile folder,
              or "unknown" when free space can't be determined
    :rtype: str
    """
    path = self.modrana.paths.getMapFolderPath()
    free_space = utils.freeSpaceInPath(path)
    if free_space is not None:
        # reuse the already-retrieved value instead of calling
        # freeSpaceInPath() a second time as the original did
        return utils.bytes2PrettyUnitString(free_space)
    else:
        return "unknown"
def _load_local_source_data(self, source_folder):
    """Collect all PBF files below a local source folder.

    Walks the folder recursively, records the path and size of every
    *.pbf file found and logs summary statistics.

    :param source_folder: root folder to search for PBF files
    :returns: list of (file path, file size) tuples
    """
    source_log.info('local source data loader starting')
    source_log.info("loading data from local folder:")
    source_log.info("%s", os.path.abspath(source_folder))
    # gather (path, size) for every PBF file found during the walk,
    # keeping a running total of the accumulated size
    files = []
    accumulated_size = 0
    for root, _dirs, dir_files in os.walk(source_folder):
        pbf_names = (name for name in dir_files
                     if os.path.splitext(name)[1] == ".pbf")
        for name in pbf_names:
            file_path = os.path.join(root, name)
            file_size = os.path.getsize(file_path)
            accumulated_size += file_size
            files.append((file_path, file_size))
    source_log.info("found %d PBF files together %s in size",
                    len(files),
                    utils.bytes2PrettyUnitString(accumulated_size))
    return files
def _secondaryText(self):
    """Build the secondary status line for the batch tile download UI.

    The text depends on the current phase: finished batch, running
    size check, running batch download, or idle size-check state.

    :returns: status text (may contain simple HTML markup), "" when a
              finished size check found nothing, or None in transitional
              running states
    """
    # finished batch with nothing pending -> report what was downloaded
    # NOTE(review): both downloadPool and _downloadPool are referenced
    # here - presumably one is a property alias of the other; confirm
    if self.downloadPool.ended \
        and self.requestCount == 0 \
        and self._downloadPool.downloadedDataSize:
        downloadedPretty = utils.bytes2PrettyUnitString(
            self.downloadPool.downloadedDataSize)
        return "%s has been downloaded" % downloadedPretty
    if self.running:
        if self.checkSizeRunning:
            estimatePretty = utils.bytes2PrettyUnitString(
                self._checkPool.downloadSize)
            return "batch size is ~%s, %d tiles found locally" % (
                estimatePretty, self._checkPool.foundLocally)
        if self.batchDownloadRunning:
            progress = utils.bytes2PrettyUnitString(
                self._downloadPool.downloadedDataSize)
            if self._checkPool.downloadSize:
                # a size check ran -> append its estimate as the total
                estimatePretty = utils.bytes2PrettyUnitString(
                    self._checkPool.downloadSize)
                progress = "%s/~%s" % (progress, estimatePretty)
            else:
                # no size check - fall back to the approximate size,
                # read once (negative means not available)
                approxSize = self.approxDownloadSize
                if approxSize >= 0:
                    progress = "%s/~%s" % (
                        progress,
                        utils.bytes2PrettyUnitString(approxSize))
            return "%s downloaded" % progress
        return None  # running, but in a transitional state
    # idle - report the result of the last size check, if any
    if not self._checkPool.ended:
        return "Total size of tiles is unknown (<i>click to check</i>)."
    if self._checkPool.downloadSize:
        totalPretty = utils.bytes2PrettyUnitString(
            self._checkPool.downloadSize)
        return "Total size is ~%s (<i>click to recheck</i>)." % totalPretty
    return ""