def test_update_sensor(self):
    """Tests that the sensors are properly updated.

    For each supported geography, runs a full ``update_sensor`` pass
    against a mocked S3 bucket and checks that one output file is
    produced per sensor date.
    """
    for geo in ["state", "hrr"]:
        td = TemporaryDirectory()
        su_inst = CHCSensorUpdator(
            "02-01-2020",
            "06-01-2020",
            "06-12-2020",
            geo,
            self.parallel,
            self.weekday,
            self.numtype,
            self.se
        )
        # try/finally guarantees the temp dir is removed even when the
        # assertion fails or update_sensor raises (previously it leaked).
        try:
            with mock_s3():
                # Create the fake bucket we will be using
                params = read_params()
                aws_credentials = params["aws_credentials"]
                s3_client = Session(**aws_credentials).client("s3")
                s3_client.create_bucket(Bucket=params["bucket_name"])
                su_inst.update_sensor(self.small_test_data, td.name)
            # One CSV per sensor date is expected in the output directory.
            assert len(os.listdir(td.name)) == len(su_inst.sensor_dates), \
                f"failed {geo} update sensor test"
        finally:
            td.cleanup()
def run_as_module(date):
    """Run the indicator end-to-end at a frozen *date* with a mocked S3 bucket.

    Clears stale CSV output from the working directories, seeds the daily
    cache with one pre-existing weekly file, then invokes ``run_module``
    inside ``mock_s3``/``freeze_time``.

    Parameters
    ----------
    date:
        The date (accepted by ``freeze_time``) the module should believe
        "now" is while running.
    """
    def _clean_csvs(directory):
        # Remove leftover CSV files so output from prior runs doesn't leak in.
        for fname in listdir(directory):
            if ".csv" in fname:
                remove(join(directory, fname))

    # Clean directories
    for directory in ("receiving", "cache", "daily_cache"):
        _clean_csvs(directory)
    # Simulate the cache already being partially populated
    copy("test_data/weekly_202025_state_wip_deaths_covid_incidence_prop.csv",
         "daily_cache")
    _clean_csvs("daily_receiving")
    with mock_s3():
        with freeze_time(date):
            # Create the fake bucket we will be using
            params = read_params()
            aws_credentials = params["aws_credentials"]
            s3_client = Session(**aws_credentials).client("s3")
            s3_client.create_bucket(Bucket=params["bucket_name"])
            run_module()
def run_as_module():
    """Execute ``run_module`` against a freshly mocked S3 bucket.

    Empties the receiving directory (keeping only ``.gitignore``) so the
    run starts from a clean slate.
    """
    # Drop everything except .gitignore from the receiving directory.
    leftovers = [entry for entry in listdir("receiving") if entry != ".gitignore"]
    for leftover in leftovers:
        remove(join("receiving", leftover))
    with mock_s3():
        # Stand up the fake bucket the module expects to upload to.
        params = read_params()
        credentials = params["aws_credentials"]
        s3_client = Session(**credentials).client("s3")
        s3_client.create_bucket(Bucket=params["bucket_name"])
        run_module()
class s3Instanciator:
    """Creates an S3 bucket under a named AWS profile.

    Holds a single profile-scoped boto3 ``Session`` and reuses it for both
    the S3 client and the region lookup, instead of constructing a new
    ``Session`` on every call as the original did.
    """

    def __init__(self, profile, name):
        """Set up clients for *profile* and remember the bucket *name*.

        Parameters
        ----------
        profile:
            AWS named profile to create the bucket under.
        name:
            Name of the bucket to create.
        """
        # NOTE(review): this default-session client is never used by
        # create(); kept only for backward compatibility with any
        # external callers that may read it — confirm before removing.
        self.client = client('s3')
        self.profile = profile
        # One session serves both the client and the region lookup below.
        self._session = Session(profile_name=self.profile)
        self.s3 = self._session.client('s3')
        self.name = name

    def create(self):
        """Create the bucket in the profile's configured region."""
        self.s3.create_bucket(
            Bucket=self.name,
            CreateBucketConfiguration={
                'LocationConstraint': self._session.region_name
            })
continue elif error_code == 403: print( "This is an existing bucket that you do not have access to.\n" ) continue elif error_code == 404: print("Bucket does not exist.\n") continue elif (bucketAnswer.lower() == "n" or bucketAnswer.lower() == "no"): try: newBucket = input("New Bucket Name: ") s3.create_bucket(Bucket=newBucket, CreateBucketConfiguration={ "LocationConstraint": REGION if configuration else "us-west-2" }) bucketName = newBucket break except botocore.exceptions.ClientError as e: print( "Invalid bucket name. Bucket names must be between 3 and 63 and consist of lowercase letters and numbers (no special characters).\n" ) continue elif (bucketAnswer.lower() == "exit" or bucketAnswer.lower() == "q" or bucketAnswer.lower() == "quit"): print("\n***************************************") print("******Exiting the backup program.******") print("***************************************\n") exit(-1)