def from_s3(overwrite=False):
    """Import test results from S3 and insert them into the database."""
    s3 = S3.factory(bucket='juju-qa-data', directory='cwr')
    ds = Datastore()
    for key in s3.list(filter_fun=_filter_fun):
        # Skip results that are already up to date unless an overwrite is forced.
        if not overwrite and not doc_needs_update(key):
            app.logger.debug('Skip importing {}'.format(key.name))
            continue
        app.logger.info('Import new data from {}'.format(key.name))
        # The key name encodes the job name and build numbers.
        job_name = get_meta_data(key.name, 'job_name')
        build_number = get_meta_data(key.name, 'build_number')
        uploader_number = get_meta_data(key.name, 'uploader_build_number')
        build_info = json.loads(key.get_contents_as_string())
        artifacts = get_artifacts(build_info)
        test_result_path = get_test_path(
            artifacts, job_name, build_number, uploader_number)
        if not test_result_path:
            app.logger.error(
                'Test result file not found for key: {}'.format(key.name))
            continue
        svg_path = make_path(
            artifacts, job_name, build_number, uploader_number, 'result.svg')
        html_path = make_path(
            artifacts, job_name, build_number, uploader_number, 'result.html')
        json_path = make_path(
            artifacts, job_name, build_number, uploader_number, 'result.json')
        test_key = s3.get(test_result_path)
        test = json.loads(test_key.get_contents_as_string())
        doc = make_doc(build_info, test, job_name, key, artifacts, svg_path,
                       html_path, json_path)
        # Upsert by id so re-imports replace the stored document.
        ds.update({'_id': _get_id(key)}, doc)

def from_s3(overwrite=False):
    """Import test results from S3 and insert them into the database."""
    s3 = S3.factory(bucket='juju-qa-data', directory='cwr')
    ds = Datastore()
    for key in s3.list(filter_fun=_filter_fun):
        if not overwrite and not doc_needs_update(key):
            app.logger.debug('Skip importing {}'.format(key.name))
            continue
        app.logger.info('Import new data from {}'.format(key.name))
        job_name = get_meta_data(key.name, 'job_name')
        build_number = get_meta_data(key.name, 'build_number')
        uploader_number = get_meta_data(key.name, 'uploader_build_number')
        build_info = json.loads(key.get_contents_as_string())
        artifacts = get_artifacts(build_info)
        test_result_path = get_test_path(
            artifacts, job_name, build_number, uploader_number)
        if not test_result_path:
            app.logger.error(
                'Test result file not found for key: {}'.format(key.name))
            continue
        svg_path = get_svg_path(
            artifacts, job_name, build_number, uploader_number)
        test_key = s3.get(test_result_path)
        test = json.loads(test_key.get_contents_as_string())
        doc = make_doc(build_info, test, job_name, key, artifacts, svg_path)
        ds.update({'_id': _get_id(key)}, doc)

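# from_s3 relies on a Flask application's logger (app.logger), so in practice
# it runs inside that application. A minimal sketch, not taken from the
# source, of driving the import from the command line follows; the
# '--overwrite' flag simply mirrors the function's keyword argument, and
# 'app' and 'from_s3' are assumed to come from the module above.
import argparse


def main():
    parser = argparse.ArgumentParser(
        description='Import CWR test results from S3 into the datastore.')
    parser.add_argument(
        '--overwrite', action='store_true',
        help='Re-import results even when the stored document is current.')
    args = parser.parse_args()
    # Run inside the application context so app.logger and any app-bound
    # configuration behave as they do in the web process.
    with app.app_context():
        from_s3(overwrite=args.overwrite)


if __name__ == '__main__':
    main()
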
def test_update(self):
    doc = make_doc()
    ds = Datastore()
    with patch('cwrstatus.datastore.get_current_utc_time', autospec=True,
               return_value=doc['_updated_on']) as gcut_mock:
        ds.update({"_id": doc["_id"]}, doc)
    items = list(self.ds.db.cwr.find())
    self.assertEqual(items, [doc])
    gcut_mock.assert_called_once_with()

def test_update_existing_doc(self):
    doc = make_doc()
    ds = Datastore()
    with patch('cwrstatus.datastore.get_current_utc_time', autospec=True,
               return_value=doc['_updated_on']):
        ds.update({"_id": doc["_id"]}, doc)
        items = list(self.ds.db.cwr.find())
        self.assertEqual(items, [doc])
        doc['bundle_name'] = 'new bundle'
        ds.update({"_id": doc["_id"]}, doc)
        items = list(self.ds.db.cwr.find())
        self.assertEqual(items, [doc])

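# The two tests above pin down what they expect from Datastore.update: the
# document is stamped via cwrstatus.datastore.get_current_utc_time() and
# written as an upsert keyed on '_id', so a second update replaces the stored
# copy instead of adding a new one. A minimal sketch matching those
# expectations follows; the 'cwr' collection name comes from the tests, and
# the rest is an assumption rather than the project's actual implementation.
from datetime import datetime

from pymongo import MongoClient


def get_current_utc_time():
    """Return the current UTC time; patched in the tests above."""
    return datetime.utcnow()


class Datastore:

    def __init__(self):
        # The tests read results back through self.ds.db.cwr, so the database
        # handle is exposed as an attribute (names assumed).
        self.db = MongoClient().cwr

    def update(self, spec, doc):
        # Stamp the document so callers can tell when it was last imported.
        doc['_updated_on'] = get_current_utc_time()
        # Upsert: insert the document if spec matches nothing, otherwise
        # replace the existing one.
        self.db.cwr.replace_one(spec, doc, upsert=True)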