def test_bad_url(self):
    httpretty.register_uri(httpretty.POST, self._logzio_url, status=404)
    ship = shipper.LogzioShipper(self._logzio_url)

    # send csv info to our mock server
    with gzip.open(SAMPLE_CSV_GZIP_1) as f:
        csv_lines = f.read().decode('utf-8').splitlines(True)

    event_time = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    r = csv.reader(csv_lines, delimiter=',')
    tmp_headers = next(r)
    headers = [header.replace('/', '_') for header in tmp_headers]

    # shipping to a 404 endpoint should raise UnknownURL
    with self.assertRaises(UnknownURL):
        for row in r:
            ship.add(worker._parse_file(headers, row, event_time))
        ship.flush()
def test_parsed_logs(self):
    httpretty.register_uri(httpretty.POST, self._logzio_url)

    # send csv info to our mock server
    with gzip.open(SAMPLE_CSV_GZIP_1) as f:
        csv_lines = f.read().decode('utf-8').splitlines(True)

    event_time = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    ship = shipper.LogzioShipper(self._logzio_url)
    r = csv.reader(csv_lines, delimiter=',')
    tmp_headers = next(r)
    headers = [header.replace('/', '_') for header in tmp_headers]

    for row in r:
        ship.add(worker._parse_file(headers, row, event_time))
    ship.flush()

    # compare what the mock server received against the original csv
    reader = DictReader(csv_lines)
    self.assertTrue(
        utils.verify_requests([reader], httpretty.HTTPretty.latest_requests),
        "Something went wrong parsing...")
def test_multi_reports(self):
    event_time = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    curr_month, prev_month = utils.get_months_range()
    key = "{0}/{1}/12345678-1234-1234-1234-123456789123/{2}".format(
        os.environ['REPORT_PATH'], curr_month, os.environ['REPORT_NAME'])

    # set two files as latest report
    manifest_content = {
        "reportKeys": ["{}-1.csv.gz".format(key), "{}-2.csv.gz".format(key)]
    }
    utils.put_object(
        s3client, os.environ['S3_BUCKET_NAME'],
        "{0}/{1}/{2}-Manifest.json".format(os.environ['REPORT_PATH'],
                                           curr_month,
                                           os.environ['REPORT_NAME']),
        json.dumps(manifest_content))

    # upload two files
    s3res = boto3.resource('s3')
    utils.upload_gzipped(s3res, os.environ['S3_BUCKET_NAME'],
                         manifest_content["reportKeys"][0], SAMPLE_CSV_GZIP_1)
    utils.upload_gzipped(s3res, os.environ['S3_BUCKET_NAME'],
                         manifest_content["reportKeys"][1], SAMPLE_CSV_GZIP_2)

    # user flow
    env_var = {
        'logzio_url': os.environ['URL'],
        'token': os.environ['TOKEN'],
        'bucket': os.environ['S3_BUCKET_NAME'],
        'report_path': os.environ['REPORT_PATH'],
        'report_name': os.environ['REPORT_NAME']
    }
    latest_csv_keys = worker._latest_csv_keys(s3client, env_var, event_time)

    readers = []
    ship = shipper.LogzioShipper(self._logzio_url)

    # first csv
    csv_like_obj1 = s3client.get_object(Bucket=env_var['bucket'],
                                        Key=latest_csv_keys[0])
    gen1 = worker.CSVLineGenerator(csv_like_obj1['Body'])
    csv_lines1 = gen1.headers
    for line in gen1.stream_line():
        csv_lines1 += line
    readers.append(DictReader(csv_lines1.splitlines(True)))

    # second csv
    csv_like_obj2 = s3client.get_object(Bucket=env_var['bucket'],
                                        Key=latest_csv_keys[1])
    gen2 = worker.CSVLineGenerator(csv_like_obj2['Body'])
    csv_lines2 = gen2.headers
    for line in gen2.stream_line():
        csv_lines2 += line
    readers.append(DictReader(csv_lines2.splitlines(True)))

    # now we can use http mock
    httpretty.register_uri(httpretty.POST, self._logzio_url)
    httpretty.enable()

    for line in csv_lines1.splitlines()[1:]:
        ship.add(
            worker._parse_file(gen1.headers.split(','),
                               next(csv.reader([line])), event_time))
    ship.flush()

    for line in csv_lines2.splitlines()[1:]:
        ship.add(
            worker._parse_file(gen2.headers.split(','),
                               next(csv.reader([line])), event_time))
    ship.flush()

    self.assertTrue(
        utils.verify_requests(readers, httpretty.HTTPretty.latest_requests),
        "Something went wrong parsing...")
    httpretty.disable()
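# A minimal sketch (not part of the module under test) of the "parse rows, ship,
# flush" pattern repeated in the tests above, assuming the worker._parse_file(headers,
# row, event_time) and LogzioShipper.add()/flush() interfaces shown there. The helper
# name _ship_csv_lines is illustrative only and does not exist in the codebase.
def _ship_csv_lines(ship, csv_lines, event_time):
    r = csv.reader(csv_lines, delimiter=',')
    # normalize headers the same way the tests do ('/' -> '_')
    headers = [header.replace('/', '_') for header in next(r)]
    for row in r:
        ship.add(worker._parse_file(headers, row, event_time))
    ship.flush()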