def _download(self):
    """Download the PBF extract from the URL and store it locally.

    :returns: True on success (including when the extract is already
              present locally), False on failure
    """
    try:
        if os.path.exists(self._source_data_path):
            # The extract has already been downloaded - nothing to do.
            # BUGFIX: this branch previously returned None (falsy), so
            # callers checking the boolean result treated the cached
            # case as a failed download.
            return True
        if os.path.exists(self._temp_storage_path):
            source_log.info('removing old folder %s' % self._temp_storage_path)
            shutil.rmtree(self._temp_storage_path)
        utils.createFolderPath(self._temp_storage_path)
        # NOTE(review): urllib.urlretrieve is the Python 2 spelling;
        # on Python 3 this would be urllib.request.urlretrieve - confirm
        # which interpreter this tool targets
        urllib.urlretrieve(self._url, self._source_data_path)
        return True
    except Exception:
        message = 'monav package: OSM PBF download failed\n'
        message += 'name: %s\n' % self.name
        message += 'URL: %s\n' % self._url
        message += 'storage path: %s' % self._source_data_path
        source_log.exception(message)
        return False
def _pre_update(self):
    """Make sure the temporary & publishing folders exist.

    :returns: True if both folders exist or were created, False otherwise
    """
    # idiomatic truthiness checks instead of "== False" comparisons
    if not utils.createFolderPath(self.temp_path):
        return False
    if not utils.createFolderPath(self.publish_path):
        return False
    return True
def _getTFSubPath(self, subPath):
    """Return a tracklog folder sub path, making sure the
    path exists before returning it.
    """
    baseFolder = self.modrana.paths.getTracklogsFolderPath()
    if baseFolder is None:
        # without the base tracklog folder the sub path can't be built
        print("loadTracklogs: can't get tracklog sub path - tracklog folder path is unknown")
        return None
    subFolder = os.path.join(baseFolder, subPath)
    utils.createFolderPath(subFolder)
    return subFolder
def _getTFSubPath(self, subPath):
    """Return a tracklog folder sub path, making sure the
    path exists before returning it.
    """
    baseFolder = self.modrana.paths.getTracklogsFolderPath()
    if baseFolder is None:
        # without the base tracklog folder the sub path can't be built
        self.log.error("can't get tracklog sub path - tracklog folder path is unknown")
        return None
    subFolder = os.path.join(baseFolder, subPath)
    utils.createFolderPath(subFolder)
    return subFolder
def __init__(self, modrana):
    self.modrana = modrana
    # Resolve the profile folder path, preferring an override
    # from the device module when one is set.
    overridePath = self.modrana.dmod.profilePath
    if overridePath:
        self._profileFolderPath = overridePath
    else:
        self._profileFolderPath = self.modrana.getProfilePath()
    # make sure the profile folder hierarchy actually exists
    utils.createFolderPath(self._profileFolderPath)
    # the version string gets loaded later
    self.versionString = None
def getProfilePath(self):
    """return the profile folder (create it if it does not exist)
    NOTE: this function is provided here in the main class as some
    ordinary modRana modules need to know the profile folder path
    before the option module that normally handles it is fully
    initialized (for example the config module might need to copy
    default configuration files to the profile folder in its init)
    """
    # assemble ~/.modrana from the user's home folder
    folderName = '.modrana'
    homePath = os.getenv("HOME", "")
    profilePath = os.path.join(homePath, folderName)
    # make sure the folder exists before handing the path out
    utils.createFolderPath(profilePath)
    return profilePath
def enable_log_file(self, compression=False):
    """Enable logging Tsubame log messages to file.

    If this is called during startup, early log messages preceding the
    log file activation are dumped to the log, so no messages from a
    Tsubame run should be missing from the log file.

    :param bool compression: if True, write a gzip-compressed log file
    """
    # attempt to enable the log file
    with self._log_file_enabled_lock:
        # check if the log file is not already enabled
        if self._log_file_enabled:
            # BUGFIX: corrected grammar of the error message
            # ("exist" -> "exists")
            self._root_tsubame_logger.error("log file already exists")
            return
        # first try to make sure the logging folder actually exists
        if not utils.createFolderPath(self.log_folder_path):
            self._root_tsubame_logger.error("failed to create logging folder in: %s",
                                            self.log_folder_path)
            return
        self._log_file_compression = compression
        # create a file logger that logs everything
        log_file_path = os.path.join(self.log_folder_path,
                                     self._get_log_filename(compression=compression))
        if compression:
            if sys.version_info >= (3, 0):
                self._compressed_log_file = gzip.open(log_file_path,
                                                      mode="wt",
                                                      encoding="utf-8")
            else:
                # Python 2 gzip.open() has no encoding support
                self._compressed_log_file = gzip.open(log_file_path, mode="wb")
            self._file_handler = logging.StreamHandler(self._compressed_log_file)
        else:
            self._file_handler = logging.FileHandler(log_file_path)
        self._file_handler.setLevel(logging.DEBUG)
        full_formatter = logging.Formatter('%(asctime)s %(levelname)s %(name)s: %(message)s')
        self._file_handler.setFormatter(full_formatter)
        # dump any early log messages to the log file
        if self._memory_handler:
            self._memory_handler.setTarget(self._file_handler)
            self._memory_handler.flush()  # write all the early log records
            self._file_handler.flush()
            # now attach the log file to the root logger
            self._root_tsubame_logger.addHandler(self._file_handler)
            # flush the memory logger again in case any messages arrived before
            # the last flush and connecting the log file to the root logger
            # (this might duplicate some messages, but we should not loose any
            # as both the MemoryHandler and root logger are connected at the moment)
            # now flush & nuke the MemoryHandler
            self._root_tsubame_logger.removeHandler(self._memory_handler)
            self._memory_handler.flush()
            self._memory_handler.close()
            self._memory_handler = None
        else:
            # just attach the log file to the root logger
            self._root_tsubame_logger.addHandler(self._file_handler)
        self.log_file_path = log_file_path
        self._log_file_enabled = True
        self._root_tsubame_logger.info("log file enabled: %s" % log_file_path)
def enable_log_file(self, compression=False):
    """Enable logging modRana log messages to file.

    If this is called during startup, early log messages preceding the
    log file activation are dumped to the log, so no messages from a
    modRana run should be missing from the log file.

    :param bool compression: if True, write a gzip-compressed log file
    """
    # attempt to enable the log file
    with self._log_file_enabled_lock:
        # check if the log file is not already enabled
        if self._log_file_enabled:
            # BUGFIX: corrected grammar of the error message
            # ("exist" -> "exists")
            self._root_modrana_logger.error("log file already exists")
            return
        # first try to make sure the logging folder actually exists
        if not utils.createFolderPath(self.log_folder_path):
            self._root_modrana_logger.error("failed to create logging folder in: %s",
                                            self.log_folder_path)
            return
        self._log_file_compression = compression
        # create a file logger that logs everything
        log_file_path = os.path.join(self.log_folder_path,
                                     self._get_log_filename(compression=compression))
        if compression:
            if sys.version_info >= (3, 0):
                self._compressed_log_file = gzip.open(log_file_path,
                                                      mode="wt",
                                                      encoding="utf-8")
            else:
                # Python 2 gzip.open() has no encoding support
                self._compressed_log_file = gzip.open(log_file_path, mode="wb")
            self._file_handler = logging.StreamHandler(self._compressed_log_file)
        else:
            self._file_handler = logging.FileHandler(log_file_path)
        self._file_handler.setLevel(logging.DEBUG)
        full_formatter = logging.Formatter('%(asctime)s %(levelname)s %(name)s: %(message)s')
        self._file_handler.setFormatter(full_formatter)
        # dump any early log messages to the log file
        if self._memory_handler:
            self._memory_handler.setTarget(self._file_handler)
            self._memory_handler.flush()  # write all the early log records
            self._file_handler.flush()
            # now attach the log file to the root logger
            self._root_modrana_logger.addHandler(self._file_handler)
            # flush the memory logger again in case any messages arrived before
            # the last flush and connecting the log file to the root logger
            # (this might duplicate some messages, but we should not loose any
            # as both the MemoryHandler and root logger are connected at the moment)
            # now flush & nuke the MemoryHandler
            self._root_modrana_logger.removeHandler(self._memory_handler)
            self._memory_handler.flush()
            self._memory_handler.close()
            self._memory_handler = None
        else:
            # just attach the log file to the root logger
            self._root_modrana_logger.addHandler(self._file_handler)
        self.log_file_path = log_file_path
        self._log_file_enabled = True
        self._root_modrana_logger.info("log file enabled: %s" % log_file_path)
def _loadFromFile(self):
    """Use a local PBF file as the data source.

    :returns: True on success, False on failure
    """
    try:
        if not os.path.exists(self._source_data_path):
            # BUGFIX: the original fell through here and implicitly
            # returned None without any indication of what went wrong;
            # report the missing source file and fail explicitly
            source_log.error('monav package: source data file missing: %s',
                             self._source_data_path)
            return False
        if os.path.exists(self._temp_storage_path):
            source_log.info('removing old temporary folder %s', self._temp_storage_path)
            shutil.rmtree(self._temp_storage_path)
        utils.createFolderPath(self._temp_storage_path)
        return True
    except Exception:
        message = 'monav package: OSM PBF loading failed\n'
        message += 'name: %s\n' % self.name
        message += 'filePath: %s\n' % self.source_file_path
        message += 'storage path: %s' % self._source_data_path
        source_log.exception(message)
        return False
def publish(self, main_repo_path, cleanup=True):
    """Publish the package results to the online repository.

    :param str main_repo_path: path to the main repository folder
    :param bool cleanup: if True, clear source & temporary files afterwards
    """
    # the target folder does not depend on the individual result file,
    # so compute it once outside of the loop
    finalRepoPath = os.path.join(main_repo_path, self._repo_sub_path)
    for path2file in self.results:
        try:
            # try to make sure the folder exists
            utils.createFolderPath(finalRepoPath)
            # move the results
            shutil.move(path2file, finalRepoPath)
        except Exception:
            message = 'monav package: publishing failed\n'
            # BUGFIX: added the missing newline so the file path and the
            # target path are no longer glued together in the log output
            message += 'file: %s\n' % path2file
            message += 'target path: %s' % finalRepoPath
            publish_log.exception(message)
    if cleanup:
        # clean up any source & temporary files
        self.clear_all()
def _createBasicFolderStructure(self):
    """trigger creation of the logs, misc and online folders
    also copy example tracklogs, if necessary"""
    for subFolder in ("logs", "online", "misc"):
        self._getTFSubPath(subFolder)
    # if the example folder does not exist yet, create
    # it and populate it with the example tracklogs
    tracklogFolder = self.modrana.paths.getTracklogsFolderPath()
    examplesPath = os.path.join(tracklogFolder, 'examples')
    if not os.path.exists(examplesPath):
        utils.createFolderPath(examplesPath)
        print(' ** loadTracklogs: copying example tracklogs')
        sourceFolder = 'data/tracklog_examples'
        # copy all files from this folder
        for fileName in os.listdir(sourceFolder):
            sourceFile = os.path.join(sourceFolder, fileName)
            if os.path.isfile(sourceFile):
                print(' ** copying: %r' % fileName)
                shutil.copy(sourceFile, os.path.join(examplesPath, fileName))
        print(' ** DONE')
def _createBasicFolderStructure(self):
    """trigger creation of the logs, misc and online folders
    also copy example tracklogs, if necessary"""
    for subFolder in ("logs", "online", "misc"):
        self._getTFSubPath(subFolder)
    # if the example folder does not exist yet, create
    # it and populate it with the example tracklogs
    tracklogFolder = self.modrana.paths.getTracklogsFolderPath()
    examplesPath = os.path.join(tracklogFolder, 'examples')
    if not os.path.exists(examplesPath):
        utils.createFolderPath(examplesPath)
        self.log.info(' ** copying example tracklogs')
        try:
            sourceFolder = 'data/tracklog_examples'
            # copy all files from this folder
            for fileName in os.listdir(sourceFolder):
                sourceFile = os.path.join(sourceFolder, fileName)
                if os.path.isfile(sourceFile):
                    self.log.info(' ** copying: %r', fileName)
                    shutil.copy(sourceFile, os.path.join(examplesPath, fileName))
            self.log.info(' ** DONE')
        except Exception:
            self.log.exception("could not copy example tracklogs")
def saveToFile(self, filePath):
    """Serialize this dict to a JSON file.

    :param str filePath: path of the target JSON file
    :returns: True on success, False otherwise
    """
    with self._mutex:
        success = False
        # first try to make sure the folder for storing
        # the JSON file exists
        if utils.createFolderPath(os.path.dirname(filePath)):
            try:
                jsonString = json.dumps(self, ensure_ascii=False, indent=True)
                # BUGFIX: the original called jsonString.encode('utf8') and
                # discarded the result (strings are immutable), so the call
                # had no effect and has been removed.
                # NOTE(review): if non-ASCII content ever fails to write
                # here, the file should be opened with an explicit UTF-8
                # encoding - confirm the target Python version first
                with open(filePath, "w") as f:
                    f.write(jsonString)
                success = True
            except Exception:
                log.exception("saving to JSON file failed")
        else:
            log.error("JSONDict: can't save file to: %s", filePath)
        return success
#!/usr/bin/python
import os
import time

from core.utils import prettyTimeDiff, createFolderPath

# As the source data update can happen independently of the repository
# update it has its own log folder - it should be easy to correlate which
# source data update was followed by which repository update by the date.
log_folder = os.path.abspath(
    "logs/source_data_update_logs_%s" % time.strftime("%Y.%m.%d-%H:%M:%S")
)
if not createFolderPath(log_folder):
    print("ERROR: can't create log folder for the source data update run in:")
    print(log_folder)
    print("log data for this update run might not be gathered or the update run might fail outright")

planet_update_log = os.path.join(log_folder, "update_planet.log")
planet_split_log = os.path.join(log_folder, "split_planet.log")
continents_split_log = os.path.join(log_folder, "split_continents.log")

print("starting modRana repository source data update")
start = time.time()
print("updating the planet osm file")
planet_update_rc = os.system("./tools/update_planet.py>%s" % planet_update_log)
# only do the sanity check if the download was successful
if planet_update_rc == 0:
    planet_update_rc = os.system("./tools/sanity_check_update_planet.py>>%s" % planet_update_log)
dt = int(time.time() - start)
print("planet osm file update finished in %s" % prettyTimeDiff(dt))
def _assurePath(self, path):
    """assure path exists and return it back"""
    # create the folder hierarchy if it is not present yet
    utils.createFolderPath(path)
    return path
def getProfilePath(self):
    """return path to the profile folder"""
    # create the folder hierarchy if it is not present yet
    utils.createFolderPath(self._profileFolderPath)
    return self._profileFolderPath