def load_data(self):
    """Load the external data set, scraping it only when it cannot be
    read from the local saved copy.
    """
    # _read_extd() returns truthy when a local copy was read; only
    # fall back to the external scrapers when it fails.
    if not self._read_extd():
        KScrap.lm_scraping(self._lm)
        KScrap.ve_scraping(self._ve)
        KScrap.qu_scraping(self._qu)
        KScrap.q1_scraping(self._q1, self._index)
        KScrap.cq_scraping(self._cq, self._cqp)
        # NOTE(review): ext_data_ok presumably reflects the success of
        # the scraping calls above — confirm against its definition.
        if self.ext_data_ok:
            self._save_extd()
            self._calc_mean()
            self._save_mean()
def __init__(self, index=DEFAULT_INDEX):
    """Initialize the k data, preferring a locally saved copy and
    falling back to scraping the external source.
    """
    self._index = index
    self._k = []
    # A non-default index means a local file may exist for it.
    if index != DEFAULT_INDEX:
        self._read_k()
    # Local copy found — nothing more to do.
    if self.loaded:
        return
    # Retrieve from the external source instead.
    self._k, self._index = KScrap.k_scraping()
    if self.loaded:
        # Cache the freshly scraped data locally.
        self._save_k()
    else:
        # Scraping failed: keep the index the caller passed in.
        self._index = index
def __init__(self, index=DEFAULT_INDEX):
    """Initialize the cl data, preferring a locally saved copy and
    falling back to scraping the external source.
    """
    self._index = index
    self._b1 = []
    self._a2 = []
    # Attempt the local file first.
    self._read_cldata()
    # Local copy found — nothing more to do.
    if self.loaded:
        return
    # Retrieve from the external source instead.
    self._b1, self._a2 = KScrap.scrap_cl_data()
    if self.loaded:
        # Cache the freshly scraped data locally.
        self._save_cldata()
    else:
        # Scraping failed: keep the index the caller passed in.
        self._index = index
def _scrap_res(max_range, file_dir, url_prefix, data_size):
    """Scrape result data for files numbered 1..max_range into file_dir.

    For each two-digit index whose destination file does not already
    exist, the data is retrieved from url_prefix + index and saved.
    Stops at the first index for which too little data is returned.

    :param max_range: highest file number to retrieve (inclusive).
    :param file_dir: directory (relative to the cwd) for output files.
    :param url_prefix: URL to which the two-digit index is appended.
    :param data_size: minimum-size factor; data shorter than
        data_size * 4 is treated as a failed retrieval.
    """
    # Hoist the loop-invariant directory lookup out of the loop.
    base_dir = os.path.join(os.getcwd(), file_dir)
    for i in range(1, max_range + 1):
        i_str = str(i).zfill(2)
        file_name = os.path.join(
            base_dir, RES_FILE_PREFIX + i_str + INPUT_FILE_NAME_EXT)
        # Skip files that were already downloaded.
        if not os.path.exists(file_name):
            # Parenthesized print works under both Python 2 and 3.
            print("Retrieving data for file: %s" % file_name)
            url = url_prefix + i_str
            data = KScrap.res_scraping(url)
            if len(data) > data_size * 4:
                ResData._save_res_data(file_name, data)
            else:
                # If the data could not be retrieved, stop scraping.
                print("Exiting as no data has been retrieved for: %s."
                      % file_name)
                break
def __init__(self, index=DEFAULT_INDEX):
    """Initialize the k data from a local file when possible, otherwise
    by scraping the external source.
    """
    self._index = index
    self._k = []
    # Only a non-default index can have a previously saved local file.
    local_candidate = index != DEFAULT_INDEX
    if local_candidate:
        self._read_k()
    if not self.loaded:
        # No local data — fall back to scraping.
        self._k, self._index = KScrap.k_scraping()
        if not self.loaded:
            # Scraping also failed: restore the caller's index.
            self._index = index
        else:
            # Persist the scraped data for next time.
            self._save_k()
def _scrap_res(max_range, file_dir, url_prefix, data_size):
    """Scrape result data for files numbered 1..max_range into file_dir.

    For each two-digit index whose destination file does not already
    exist, the data is retrieved from url_prefix + index and saved.
    Stops at the first index for which too little data is returned.

    :param max_range: highest file number to retrieve (inclusive).
    :param file_dir: directory (relative to the cwd) for output files.
    :param url_prefix: URL to which the two-digit index is appended.
    :param data_size: minimum-size factor; data shorter than
        data_size * 4 is treated as a failed retrieval.
    """
    # Hoist the loop-invariant directory lookup out of the loop.
    base_dir = os.path.join(os.getcwd(), file_dir)
    for i in range(1, max_range + 1):
        i_str = str(i).zfill(2)
        file_name = os.path.join(
            base_dir, RES_FILE_PREFIX + i_str + INPUT_FILE_NAME_EXT)
        # Skip files that were already downloaded.
        if not os.path.exists(file_name):
            # Parenthesized print works under both Python 2 and 3.
            print("Retrieving data for file: %s" % file_name)
            url = url_prefix + i_str
            data = KScrap.res_scraping(url)
            if len(data) > data_size * 4:
                ResData._save_res_data(file_name, data)
            else:
                # If the data could not be retrieved, stop scraping.
                print("Exiting as no data has been retrieved for: %s."
                      % file_name)
                break
def __init__(self, index=DEFAULT_INDEX):
    """Initialize the cl data from a local file when permitted,
    otherwise by scraping the external source.
    """
    self._index = index
    self._b1 = []
    self._a2 = []
    # NO_READ_INDEX disables the local-file lookup entirely.
    if index != NO_READ_INDEX:
        file_name = self._get_file_to_read()
        self.read_cldata(file_name)
    if not self.loaded:
        # No local data — fall back to scraping.
        self._b1, self._a2 = KScrap.scrap_cl_data()
        if not self.loaded:
            # Scraping also failed: restore the caller's index.
            self._index = index
        else:
            # Persist the scraped data for next time.
            self._save_cldata()