def test_wrong_compression_format(self):
    """A report uploaded as .zip (not gzip) must make the worker raise zlib_error.

    Publishes a manifest whose single report key ends in ``.csv.zip``,
    uploads a zip payload under that key, then runs the lambda handler
    and expects the gzip decompression to fail with ``zlib_error``.
    """
    event_time = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    curr_month, prev_month = utils.get_months_range()
    key = "{0}/{1}/12345678-1234-1234-1234-123456789123/{2}".format(
        os.environ['REPORT_PATH'], curr_month, os.environ['REPORT_NAME'])
    # Manifest deliberately points at a .zip object instead of .csv.gz.
    manifest_content = {"reportKeys": ["{}-1.csv.zip".format(key)]}
    utils.put_object(
        s3client, os.environ['S3_BUCKET_NAME'],
        "{0}/{1}/{2}-Manifest.json".format(os.environ['REPORT_PATH'],
                                           curr_month,
                                           os.environ['REPORT_NAME']),
        json.dumps(manifest_content))
    s3res = boto3.resource('s3')
    utils.upload_gzipped(s3res, os.environ['S3_BUCKET_NAME'],
                         manifest_content["reportKeys"][0], SAMPLE_CSV_ZIP_1)
    event = {
        "detail-type": "Scheduled Event",
        "source": "aws.events",
        "time": event_time
    }
    # TODO - catch exact string in the error
    try:
        worker.lambda_handler(event, {})
    except zlib_error:
        # Expected: wrong compression format was detected.
        return
    # BUG FIX: the original ended with `assert True, "..."`, which can never
    # fail — the test silently passed even when no zlib error was raised.
    self.fail("expected zlib error for wrong compression format")
def test_no_report(self):
    """Worker must raise NoSuchKey when the manifest points at a missing report."""
    event_time = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    curr_month, _unused_prev = utils.get_months_range()
    report_path = os.environ['REPORT_PATH']
    report_name = os.environ['REPORT_NAME']
    bucket = os.environ['S3_BUCKET_NAME']
    report_key = "{0}/{1}/12345678-1234-1234-1234-123456789123/{2}".format(
        report_path, curr_month, report_name)
    # Manifest references a .csv.gz object that is never actually uploaded.
    manifest = {"reportKeys": ["{}-1.csv.gz".format(report_key)]}
    manifest_location = "{0}/{1}/{2}-Manifest.json".format(report_path,
                                                           curr_month,
                                                           report_name)
    utils.put_object(s3client, bucket, manifest_location, json.dumps(manifest))
    event = {
        "detail-type": "Scheduled Event",
        "source": "aws.events",
        "time": event_time
    }
    # Fetching the missing report object must surface S3's NoSuchKey.
    with self.assertRaises(s3client.exceptions.NoSuchKey):
        worker.lambda_handler(event, {})
def test_latest_csv_file(self):
    """_latest_csv_keys falls back to the previous month's manifest, and
    prefers the current month's manifest once one exists."""
    event_time = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    curr_month, prev_month = utils.get_months_range()
    report_path = os.environ['REPORT_PATH']
    report_name = os.environ['REPORT_NAME']
    bucket = os.environ['S3_BUCKET_NAME']

    def manifest_key(month):
        # S3 key of the given month's manifest file.
        return "{0}/{1}/{2}-Manifest.json".format(report_path, month,
                                                  report_name)

    # Current month folder exists but is empty; previous month has a manifest.
    utils.put_object(s3client, bucket,
                     "{0}/{1}/".format(report_path, curr_month), '')
    utils.put_object(s3client, bucket, manifest_key(prev_month),
                     json.dumps({"reportKeys": ["location1"]}))
    env_var = {
        'logzio_url': os.environ['URL'],
        'token': os.environ['TOKEN'],
        'bucket': bucket,
        'report_path': report_path,
        'report_name': report_name
    }
    # The lookup must fall back to the previous month's manifest.
    keys = worker._latest_csv_keys(s3client, env_var, event_time)
    self.assertEqual(
        keys[0], "location1",
        "Unexpected key in the json file - {0} - {1}".format(prev_month,
                                                             keys[0]))
    # Once the current month has a manifest it must win over the previous one.
    utils.put_object(s3client, bucket, manifest_key(curr_month),
                     json.dumps({"reportKeys": ["location2"]}))
    keys = worker._latest_csv_keys(s3client, env_var, event_time)
    self.assertEqual(
        keys[0], "location2",
        "Unexpected key in the json file - {0} - {1}".format(curr_month,
                                                             keys[0]))
# NOTE(review): whitespace-mangled fragment of an unrelated ROS/matplotlib demo
# script (RGBD image/point-cloud inspection) that appears to have been pasted
# into this test file — presumably by accident; verify against the repository.
# The `try:` block is truncated here: no matching `except`/`finally` is visible,
# so this fragment is incomplete and not valid Python as it stands.
# Kept byte-identical below; do not attempt to run.
import matplotlib.pyplot as plt import rospy #import rospyimport matplotlib.pyplot as plt import tf #from utils import * #import utils #rospy.init_node("recognition") from ipywidgets import interact if __name__=='__main__': try: rgbd = RGBD() utils.put_object("e_lego_duplo", 0.4, 0.0, 0.0) utils.move_head_tilt(-1) image_data = rgbd.get_image() plt.imshow(image_data) image_data.shape image_data[0][0] points_data = rgbd.get_points() plt.imshow(points_data['z']) points_data['z'].shape points_data['z'][0][0] h_image = rgbd.get_h_image() plt.imshow(h_image)
def test_multi_reports(self):
    """End-to-end: a manifest listing two gzipped CSV reports is streamed,
    parsed line by line, shipped to a mocked Logz.io endpoint, and the
    shipped requests are verified against DictReader ground truth.
    """
    event_time = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    curr_month, prev_month = utils.get_months_range()
    key = "{0}/{1}/12345678-1234-1234-1234-123456789123/{2}".format(
        os.environ['REPORT_PATH'], curr_month, os.environ['REPORT_NAME'])
    # set two files as latest report
    manifest_content = {
        "reportKeys": ["{}-1.csv.gz".format(key), "{}-2.csv.gz".format(key)]
    }
    utils.put_object(
        s3client, os.environ['S3_BUCKET_NAME'],
        "{0}/{1}/{2}-Manifest.json".format(os.environ['REPORT_PATH'],
                                           curr_month,
                                           os.environ['REPORT_NAME']),
        json.dumps(manifest_content))
    # upload two files
    s3res = boto3.resource('s3')
    utils.upload_gzipped(s3res, os.environ['S3_BUCKET_NAME'],
                         manifest_content["reportKeys"][0], SAMPLE_CSV_GZIP_1)
    utils.upload_gzipped(s3res, os.environ['S3_BUCKET_NAME'],
                         manifest_content["reportKeys"][1], SAMPLE_CSV_GZIP_2)
    # user flow
    env_var = {
        'logzio_url': os.environ['URL'],
        'token': os.environ['TOKEN'],
        'bucket': os.environ['S3_BUCKET_NAME'],
        'report_path': os.environ['REPORT_PATH'],
        'report_name': os.environ['REPORT_NAME']
    }
    latest_csv_keys = worker._latest_csv_keys(s3client, env_var, event_time)

    def _stream_csv(csv_key):
        # Download one report and rebuild its full text via the worker's
        # streaming generator (headers + streamed body lines).
        csv_like_obj = s3client.get_object(Bucket=env_var['bucket'],
                                           Key=csv_key)
        gen = worker.CSVLineGenerator(csv_like_obj['Body'])
        csv_lines = gen.headers
        for line in gen.stream_line():
            csv_lines += line
        return gen, csv_lines

    readers = []
    ship = shipper.LogzioShipper(self._logzio_url)
    # first csv
    gen1, csv_lines1 = _stream_csv(latest_csv_keys[0])
    readers.append(DictReader(csv_lines1.splitlines(True)))
    # second csv
    gen2, csv_lines2 = _stream_csv(latest_csv_keys[1])
    readers.append(DictReader(csv_lines2.splitlines(True)))
    # now we can use http mock
    httpretty.register_uri(httpretty.POST, self._logzio_url)
    httpretty.enable()
    for gen, csv_lines in ((gen1, csv_lines1), (gen2, csv_lines2)):
        headers = gen.headers.split(',')
        # skip the header row; ship every data row
        for line in csv_lines.splitlines()[1:]:
            # BUG FIX: the original used `csv.reader([line]).next()`, which is
            # Python-2-only (`.next()` was removed in Python 3); the built-in
            # next() works on both 2.6+ and 3.x.
            ship.add(
                worker._parse_file(headers, next(csv.reader([line])),
                                   event_time))
        ship.flush()
    self.assertTrue(
        utils.verify_requests(readers, httpretty.HTTPretty.latest_requests),
        "Something wrong parsing...")
    httpretty.disable()