def load_company(self):
    """Load the company dataset into ``self.company``.

    Builds a ``BCDataCompany`` from ``<root_dir>/company/company.csv`` and
    ``<root_dir>/company/columns.json``; sets ``self.company`` to ``None``
    (and logs) when the directory or either file is missing.
    """
    data_dir = self.root_dir / "company"
    # Renamed locals: the originals shadowed the stdlib `csv` and `json` modules.
    csv_path = data_dir / "company.csv"
    columns_path = data_dir / "columns.json"
    if not (data_dir.exists() and csv_path.exists() and columns_path.exists()):
        self.company = None
        logger.info("could not load company data")  # plain string: no placeholders
    else:
        self.company = BCDataCompany(_read_csv(csv_path), _read_json(columns_path))
        logger.info("loaded company data")
def load_daily(self):
    """Load the daily dataset into ``self.daily``.

    Reads ``<root_dir>/daily/all.pickle`` and ``columns.json``; if the pickle
    is absent it is first generated from the directory's CSV files via
    ``BCDataAbs.csvs_to_pickle``. Sets ``self.daily`` to ``None`` (and logs)
    when the directory or ``columns.json`` is missing.
    """
    data_dir = self.root_dir / "daily"
    pickle_path = data_dir / "all.pickle"
    # Renamed local: the original shadowed the stdlib `json` module.
    columns_path = data_dir / "columns.json"
    if not (data_dir.exists() and columns_path.exists()):
        self.daily = None
        logger.info("could not load daily data")  # plain string: no placeholders
    else:
        if not pickle_path.exists():
            # Create the pickle file if it does not exist yet.
            BCDataAbs.csvs_to_pickle(data_dir)
        self.daily = BCDataDaily(_read_pickle(pickle_path), _read_json(columns_path))
        logger.info("loaded daily data")
def load_indicator(self):
    """Load the indicator dataset into ``self.indicator``.

    Reads ``<root_dir>/indicator/all.pickle`` and ``columns.json``; if the
    pickle is absent it is first generated from the directory's CSV files via
    ``BCDataAbs.csvs_to_pickle``. Sets ``self.indicator`` to ``None`` (and
    logs) when the directory or ``columns.json`` is missing.
    """
    data_dir = self.root_dir / "indicator"
    pickle_path = data_dir / "all.pickle"
    # Renamed local: the original shadowed the stdlib `json` module.
    columns_path = data_dir / "columns.json"
    if not (data_dir.exists() and columns_path.exists()):
        self.indicator = None
        logger.info("could not load indicator data")  # plain string: no placeholders
    else:
        if not pickle_path.exists():
            # Create the pickle file if it does not exist yet.
            BCDataAbs.csvs_to_pickle(data_dir)
        self.indicator = BCDataIndicator(_read_pickle(pickle_path), _read_json(columns_path))
        logger.info("loaded indicator data")
def _load_files(self):
    """Populate ``self.image_files`` / ``self.json_files`` from ``indexmap.csv``.

    Reads ``<root>/indexmap.csv`` with pandas; each row names an image file
    and a JSON file relative to the root. Existing pairs are appended to the
    instance lists; rows whose files are missing are collected and reported
    in a single error after the full scan.

    Raises:
        FileNotFoundError: if ``indexmap.csv`` itself is missing, or if any
            row references a file that does not exist.
    """
    base_path = self.root
    index_file = base_path.joinpath('indexmap.csv')
    if not index_file.exists():
        # Typos fixed in the user-facing message ("is exist" / "coninue").
        raise FileNotFoundError(
            "indexmap.csv is not found in root directory, make sure the file "
            "exists before we can continue to load the data!"
        )
    df = pd.read_csv(index_file)
    error_index = []  # row indices whose referenced files are missing
    for index, data in df.iterrows():
        image_path = base_path.joinpath(data['image_file'])
        # Renamed local: the original shadowed the stdlib `json` module.
        json_path = base_path.joinpath(data['json_file'])
        if not (image_path.exists() and json_path.exists()):
            error_index.append(index)
        else:
            self.image_files.append(image_path)
            self.json_files.append(json_path)
    if error_index:  # truthiness instead of len(...) > 0
        raise FileNotFoundError(
            f"File in indexmap with number {error_index} is not found, please fix the file before we can continue to load the data!"
        )