def __init__(self, output_path_override):
    """
    Initializes the report generator and creates the report directory
    if not already present.

    Parameters
    ------------
    output_path_override : str or None
        Alternate output directory. When None, the default
        "./reports/" directory is used.

    Raises
    ------------
    A FileSystemError error in case the output path could not be created

    Returns
    ------------
    None
    """
    # Fall back to the default reports folder unless an override was supplied
    self.output_path = "./reports/" if output_path_override is None else output_path_override

    util.create_dir(self.output_path)

    # Accumulators populated later by the report-building methods
    self.worksheet_list = []
    self.price_dict = {}
def __init__(self, path, **kwargs):
    '''
    Initializes the cache

    Parameters
    ----------
    path : str
        The path where the cache will be located
    max_cache_size_bytes : int (kwargs) (optional)
        the maximum size of the cache in bytes; defaults to 4GB

    Raises
    ----------
    ValidationError
        In case the supplied maximum cache size is invalid

    Returns
    -----------
    None
    '''
    # default max cache is 4GB when no override is supplied
    max_cache_size_bytes = kwargs.get('max_cache_size_bytes', 4e9)

    util.create_dir(path)

    try:
        self.disk_cache = Cache(path, size_limit=int(max_cache_size_bytes))
    except Exception as e:
        raise ValidationError('invalid max cache size', e)

    log.debug("Cache was initialized: %s" % path)
def from_s3_bucket(cls, ticker_object_name: str, app_ns: str):
    '''
    Creates a TickerFile object instance based on an S3 bucket.
    The bucket is determined by looking at the system's
    CloudFormation exports.

    If the object is not found in S3 (404), a local copy of the file
    is uploaded to the bucket when one exists; otherwise the original
    error is re-raised.

    Parameters
    ----------
    ticker_object_name : str
        S3 object name
    app_ns : str
        Application namespace used to identify the appropriate
        CloudFormation exports

    Raises
    ----------
    AWSError
        In case the object could not be downloaded and no local
        alternative was found

    Returns
    ----------
    A TickerFile instance backed by the downloaded (or local) file
    '''
    s3_object_path = "%s/%s" % (constants.S3_TICKER_FILE_FOLDER_PREFIX,
                                ticker_object_name)
    destination_path = "%s/%s" % (constants.APP_DATA_DIR, ticker_object_name)

    log.debug(
        "Reading S3 Data Bucket location from CloudFormation Exports")
    s3_data_bucket_name = aws_service_wrapper.cf_read_export_value(
        constants.s3_data_bucket_export_name(app_ns))

    util.create_dir(constants.APP_DATA_DIR)

    log.debug("Downloading s3://%s --> %s" %
              (s3_object_path, destination_path))
    try:
        aws_service_wrapper.s3_download_object(s3_data_bucket_name,
                                               s3_object_path,
                                               destination_path)
    except AWSError as awe:
        # Only a missing object triggers the local-fallback path;
        # any other AWS failure is propagated unchanged.
        if "(404)" not in str(awe) or "Not Found" not in str(awe):
            raise awe

        log.debug(
            "File not found in S3. Looking for local alternatives")

        # Attempt to upload a local copy of the file if it exists
        local_ticker_path = '%s/%s' % (constants.TICKER_DATA_DIR,
                                       ticker_object_name)
        if not os.path.isfile(local_ticker_path):
            log.debug("No local alternatives found")
            raise awe

        log.debug("Attempting to upload %s --> s3://%s/%s" %
                  (local_ticker_path, s3_data_bucket_name, s3_object_path))
        aws_service_wrapper.s3_upload_object(
            local_ticker_path, s3_data_bucket_name, s3_object_path)

        return cls.from_local_file(constants.TICKER_DATA_DIR,
                                   ticker_object_name)

    return cls.from_local_file(constants.APP_DATA_DIR, ticker_object_name)
def from_s3(cls, app_ns: str, s3_object_name: str):
    '''
    Downloads the model object from S3 into the local application data
    directory and loads it from there, using preconfigured object names.

    Parameters
    ----------
    app_ns : str
        Application namespace used to locate the CloudFormation
        export holding the S3 data bucket name
    s3_object_name : str
        Name of the model object inside the model folder prefix
    '''
    # Make sure the local destination directory exists before downloading
    util.create_dir(constants.APP_DATA_DIR)

    bucket_name = aws_service_wrapper.cf_read_export_value(
        constants.s3_data_bucket_export_name(app_ns))

    object_path = "%s/%s" % (cls.model_s3_folder_prefix, s3_object_name)
    local_path = "%s/%s" % (constants.APP_DATA_DIR, s3_object_name)

    log.info(
        "Downloading %s: s3://%s/%s --> %s" %
        (cls.model_name, bucket_name, object_path, local_path))

    aws_service_wrapper.s3_download_object(bucket_name, object_path,
                                           local_path)

    return cls.from_local_file(local_path)