def test_post_new_attachment(self, mock_fields):
    """POST /attachment saves the upload and answers 201 with a Location
    header plus JSON metadata describing the stored attachment."""
    req = DummyRequest(['/attachment'])
    req.method = 'POST'
    req.content = 'mocked'
    ident = 'B5B4ED80AC3B894523D72E375DACAA2FC6606C18EDF680FE95903086C8B5E14A'
    upload = MagicMock()
    upload.value = 'some mocked value'
    upload.type = 'some mocked type'
    upload.filename = 'filename.txt'
    mock_fields.return_value = {'attachment': upload}
    when(self.mail_service).save_attachment(
        'some mocked value', 'some mocked type').thenReturn(defer.succeed(ident))

    deferred = self.web.get(req)

    def check_response(_):
        self.assertEqual(201, req.code)
        self.assertEqual('/attachment/%s' % ident, req.headers['Location'])
        expected = {
            'ident': ident,
            'content-type': 'some mocked type',
            'name': 'filename.txt',
            'size': 17,
            'encoding': 'base64'
        }
        self.assertEqual(expected, json.loads(req.written[0]))
        verify(self.mail_service).save_attachment('some mocked value', 'some mocked type')

    deferred.addCallback(check_response)
    return deferred
def test_annotation(self):
    """Annotation via the harmonized DB attaches an openfda section only for
    product code OQG; other codes get no openfda data."""
    harmonized_db = MagicMock()
    # Mirrors dict.get(key, default): return data only for code "OQG".
    harmonized_db.get = lambda product_code, _: {
        "510k": [{"k_number": "K094035"}],
        "device_pma": [{"pma_number": "P950002"}],
        "registration": [{"fei_number": "3001451451"}],
    } if product_code == "OQG" else {}
    annotator = UDIAnnotateMapper(harmonized_db)
    mapper = XML2JSONMapper()

    def check_row(id, json):
        harmonized = annotator.harmonize(json)
        eq_("OQG", harmonized["product_codes"][0]["code"])
        # Raw submission/registration numbers must not leak into openfda.
        eq_(None, harmonized["product_codes"][0]["openfda"].get("pma_number"))
        eq_(None, harmonized["product_codes"][0]["openfda"].get("k_number"))
        eq_(None, harmonized["product_codes"][0]["openfda"].get("fei_number"))
        eq_({}, harmonized["product_codes"][1].get("openfda"))

    map_input = MagicMock()
    map_input.filename = os.path.join(dirname(os.path.abspath(__file__)), "test.xml")
    map_output = MagicMock()
    map_output.add = check_row
    mapper.map_shard(map_input, map_output)
def test_download_fallback_cache(self, fetch_dist):
    """Downloading missing package caches result from fallback"""
    db = self.request.db = MagicMock()
    locator = self.request.locator = MagicMock()
    self.request.registry.fallback = "cache"
    self.request.fallback_simple = "https://pypi.org/simple"
    self.request.access.can_update_cache.return_value = True
    # Not present locally, so the fallback index must be consulted.
    db.fetch.return_value = None
    fetch_dist.return_value = (MagicMock(), MagicMock())
    context = MagicMock()
    context.filename = "package.tar.gz"
    package_url = "https://pypi.org/simple/%s" % context.filename
    dist = make_dist(url=package_url)
    locator.get_releases.return_value = [dist]

    response = api.download_package(context, self.request)

    fetch_dist.assert_called_with(
        self.request,
        dist["url"],
        dist["name"],
        dist["version"],
        dist["summary"],
        dist["requires_python"],
    )
    self.assertEqual(response.body, fetch_dist()[1])
    response.headers.update.assert_any_call([("Cache-Control", "public, max-age=0")])
def mm(package_name):
    """Build a mock package (filename, download URL, empty data dict) for
    packages_to_dict."""
    package = MagicMock()
    package.data = {}
    package.filename = package_name
    package.get_url.return_value = package_name + ".ext"
    return package
def test_download_fallback_cache_max_age(self, fetch_dist):
    """ Downloading missing package caches result from fallback """
    db = self.request.db = MagicMock()
    locator = self.request.locator = MagicMock()
    self.request.registry.fallback = "cache"
    self.request.fallback_simple = "https://pypi.org/simple"
    self.request.access.can_update_cache.return_value = True
    # Configured max age must show up in the Cache-Control header below.
    self.request.registry.package_max_age = 30
    db.fetch.return_value = None
    fetch_dist.return_value = (MagicMock(), MagicMock())
    context = MagicMock()
    context.filename = "package.tar.gz"
    release = MagicMock()
    package_url = "https://pypi.org/simple/%s" % context.filename
    locator.get_project.return_value = {
        "0.1": release,
        "urls": {"0.1": set([package_url])},
    }

    response = api.download_package(context, self.request)

    fetch_dist.assert_called_with(self.request, release.name, package_url)
    self.assertEqual(response.body, fetch_dist()[1])
    response.headers.update.assert_any_call([("Cache-Control", "public, max-age=30")])
def test_process_attachments_disallowed_extension(self):
    """A file whose extension is not allowed is skipped: nothing is added
    to the project's attachments."""
    bad_file = MagicMock()
    bad_file.filename = 'foo.sh'
    self.project.process_attachments({'file0': bad_file}, self.test_issue)
    self.assertEqual(len(self.project.attachments), 0)
def test_import_invalid_local_csv_file_ext_returns_none(self, mock_request):
    """A non-CSV upload yields type 'localCSV' but no csv_filename.

    Fix: the original compared strings with ``is``, which tests object
    identity and only passes because CPython happens to intern these
    literals; it emits a SyntaxWarning on Python >= 3.8. Use ``==``.
    """
    mock_request.method = 'POST'
    mock_file = MagicMock()
    mock_file.filename = 'sample.txt'
    mock_request.files = dict(file=mock_file)
    form = BulkTaskLocalCSVImportForm(**self.form_data)
    return_value = form.get_import_data()
    # Value equality for the string; identity (`is`) is correct only for None.
    assert return_value['type'] == 'localCSV' and return_value['csv_filename'] is None
def test_import_upload_path_ioerror(self, mock_user, mock_request, mock_uploader):
    """get_import_data raises IOError when the upload cannot be written."""
    mock_user.id = 1
    mock_request.method = 'POST'
    csv_upload = MagicMock()
    csv_upload.filename = 'sample.csv'
    mock_request.files = dict(file=csv_upload)
    form = BulkTaskLocalCSVImportForm(**self.form_data)
    assert_raises(IOError, form.get_import_data)
def test_import_blank_local_csv_file_returns_none(self, mock_request):
    """An upload with an empty filename yields type 'localCSV' and no
    csv_filename.

    Fix: replaced the ``is 'localCSV'`` identity comparison (interning
    accident, SyntaxWarning on Python >= 3.8) with ``==``.
    """
    mock_request.method = 'POST'
    mock_file = MagicMock()
    mock_file.filename = ''
    mock_request.files = dict(file=mock_file)
    form = BulkTaskLocalCSVImportForm(**self.form_data)
    return_value = form.get_import_data()
    # Value equality for the string; `is None` is the correct identity test.
    assert return_value['type'] == 'localCSV' and return_value['csv_filename'] is None
def test_import_upload_path_works(self, mock_user, mock_request):
    """A valid CSV upload is stored under the user's own folder.

    Fix: replaced the ``is 'localCSV'`` identity comparison (interning
    accident, SyntaxWarning on Python >= 3.8) with ``==``.
    """
    mock_user.id = 1
    mock_request.method = 'POST'
    mock_file = MagicMock()
    mock_file.filename = 'sample.csv'
    mock_request.files = dict(file=mock_file)
    form = BulkTaskLocalCSVImportForm(**self.form_data)
    return_value = form.get_import_data()
    assert return_value['type'] == 'localCSV', return_value
    assert 'user_1/sample.csv' in return_value['csv_filename'], return_value
def test_import_upload_path_works(self, mock_user, mock_request):
    """A valid CSV upload is stored under the user's own folder.

    Fix: replaced the ``is 'localCSV'`` identity comparison (interning
    accident, SyntaxWarning on Python >= 3.8) with ``==``.
    """
    mock_user.id = 1
    mock_request.method = 'POST'
    mock_file = MagicMock()
    mock_file.filename = 'sample.csv'
    mock_request.files = dict(file=mock_file)
    form = BulkTaskLocalCSVImportForm(**self.form_data)
    return_value = form.get_import_data()
    assert return_value['type'] == 'localCSV', return_value
    assert 'user_1/sample.csv' in return_value['csv_filename'], return_value
def test_create_ok(self, m_File):
    """create() accepts a well-formed multipart request with a submitter."""
    m_request = MagicMock()
    upload = MagicMock()
    upload.filename = "filename"
    upload.file = io.BytesIO(b"DATA")
    m_request._params = {'files': upload, 'json': '{"submitter": "cli"}'}
    m_File.get_or_create.return_value = MagicMock()
    api_files_ext.create(m_request)
def test_process_attachments(self):
    """An allowed attachment is uploaded and recorded against the issue."""
    attachment_file = MagicMock()
    attachment_file.filename = 'foo.jpg'
    # Stub the upload so we can check the returned file id is recorded.
    self.project.upload_file = MagicMock(return_value='12345')

    self.project.process_attachments({'file0': attachment_file}, self.test_issue)

    attachment = self.project.attachments[0]
    self.assertEqual(attachment.file_id, '12345')
    self.assertEqual(attachment.project, self.project)
    self.assertEqual(attachment.parent, self.test_issue)
def test_add_files_ok(self, m_Scan, m_IrmaScanStatus, m_FileExt, m_File):
    """add_files() loads the scan by its external id and returns a scan."""
    scan_id = "whatever"
    m_request = MagicMock()
    upload = MagicMock()
    upload.filename = "filename"
    upload.file = io.BytesIO(b"DATA")
    m_request._params = {'files': upload}

    result = api_scans.add_files(m_request, scan_id)

    m_Scan.load_from_ext_id.assert_called_once_with(scan_id, self.session)
    self.assertIsScan(result)
def test_create_ok(self, m_File):
    """create() accepts multipart data carrying an explicit submitter_id."""
    m_request = MagicMock()
    upload = MagicMock()
    upload.filename = "filename"
    upload.file = io.BytesIO(b"DATA")
    m_request._params = {
        'files': upload,
        # Adjacent literals concatenate into one JSON document.
        'json': '{"submitter": "cli",' '"submitter_id": "undefined"}',
    }
    m_File.get_or_create.return_value = MagicMock()
    api_files_ext.create(m_request)
def get(self, id):
    """Return a mock file object for the stored entry *id*, wrapping its
    content in a StringIO and exposing file metadata attributes."""
    info = self._storage[id]
    from StringIO import StringIO  # Python 2 module; this code targets py2
    mock_file = MagicMock(wraps=StringIO(info['content']))
    mock_file.seek(0)
    mock_file.public_url = ''
    mock_file.filename = info['filename']
    mock_file.content_type = info['content_type']
    mock_file.content_length = len(info['content'])
    # needed to make JSON serializable, Mock objects are not
    mock_file.last_modified = datetime(2012, 12, 30)
    return mock_file
def test_import_upload_path_works(self, mock_user, mock_request, mock_upload):
    """With an S3 import bucket configured, the CSV is uploaded and its S3
    URL is returned as csv_filename.

    Fix: replaced the ``is 'localCSV'`` identity comparison (interning
    accident, SyntaxWarning on Python >= 3.8) with ``==``.
    """
    url = 'https://s3.amazonaws.com/bucket/hello.csv'
    patch_dict = {'S3_IMPORT_BUCKET': 'bucket'}
    with patch.dict(self.flask_app.config, patch_dict):
        mock_upload.return_value = url
        mock_user.id = 1
        mock_request.method = 'POST'
        mock_file = MagicMock()
        mock_file.filename = 'sample.csv'
        mock_request.files = dict(file=mock_file)
        form = BulkTaskLocalCSVImportForm(**self.form_data)
        return_value = form.get_import_data()
        assert return_value['type'] == 'localCSV', return_value
        assert return_value['csv_filename'] == url, return_value
def test_upload_file(self, mock_insert):
    """upload_file stores the image bytes in S3 under a secured name and
    returns the path plus the ids of the stored image and thumbnail."""
    image = MagicMock()
    image.filename = 'fn'
    image.read.return_value = b'content'
    self.s3_accessor.return_value.insert_image_and_thumbnail.return_value = (
        'file_id', 'thumbnail_id')
    expected_path = user_accessor.secure_filename(f'{self.mock_id.return_value}_fn')
    accessor = user_accessor.UserAccessor()

    result = accessor.upload_file('username', image)

    accessor.s3_accessor.insert_image_and_thumbnail.assert_called_once_with(
        b'content', filename=expected_path)
    self.assertEqual(result, {
        'img_path': expected_path,
        'file_id': 'file_id',
        'thumbnail_id': 'thumbnail_id'
    })
    mock_insert.assert_called_once()
def MockArbitraryBuffer(filetype, native_available=True):
    """Used via the with statement: patch the vim module so that exactly one
    buffer is open, with an arbitrary (but valid) name and contents, and its
    filetype set to the supplied filetype."""
    with patch("vim.current") as vim_current:

        def FakeVimEval(value):
            """Minimal vim.eval() replacement covering the expected queries."""
            if value == "&omnifunc":
                # The omnicompleter is not required here
                return ""
            if value == 'getbufvar(0, "&mod")':
                # Report the buffer as modified so the event reaches the server
                return 1
            if value in ('getbufvar(0, "&ft")', "&filetype"):
                return filetype
            if value.startswith("bufnr(") or value.startswith("bufwinnr("):
                return 0
            raise ValueError("Unexpected evaluation")

        # Arbitrary, but valid, cursor position
        vim_current.window.cursor = (1, 2)

        # Arbitrary, but valid, single open buffer
        fake_buffer = MagicMock()
        fake_buffer.number = 0
        fake_buffer.filename = os.path.realpath("TEST_BUFFER")
        fake_buffer.name = "TEST_BUFFER"
        fake_buffer.window = 0

        # Patch the remaining vim entry points so vimsupport sees our buffer.
        with patch("vim.buffers", [fake_buffer]):
            with patch("vim.current.buffer", fake_buffer):
                with patch("vim.eval", side_effect=FakeVimEval):
                    yield
def MockArbitraryBuffer(filetype, native_available=True):
    """Used via the with statement: patch the vim module so that exactly one
    buffer is open, with an arbitrary (but valid) name and contents, and its
    filetype set to the supplied filetype."""
    with patch('vim.current') as vim_current:

        def fake_eval(value):
            """Minimal vim.eval() replacement covering the expected queries."""
            if value == '&omnifunc':
                # The omnicompleter is not required here
                return ''
            if value == 'getbufvar(0, "&mod")':
                # Report the buffer as modified so the event reaches the server
                return 1
            if value in ('getbufvar(0, "&ft")', '&filetype'):
                return filetype
            if value.startswith('bufnr(') or value.startswith('bufwinnr('):
                return 0
            raise ValueError('Unexpected evaluation')

        # Arbitrary, but valid, cursor position
        vim_current.window.cursor = (1, 2)

        # Arbitrary, but valid, single open buffer
        mock_buffer = MagicMock()
        mock_buffer.number = 0
        mock_buffer.filename = os.path.realpath('TEST_BUFFER')
        mock_buffer.name = 'TEST_BUFFER'
        mock_buffer.window = 0

        # Patch the remaining vim entry points so vimsupport sees our buffer.
        with patch('vim.buffers', [mock_buffer]):
            with patch('vim.current.buffer', mock_buffer):
                with patch('vim.eval', side_effect=fake_eval):
                    yield
def test_empty_date_handling(self):
    """A blank original receive date is dropped from the mapped record."""
    mapper = XML2JSONMapper()
    map_input = MagicMock()
    map_input.filename = os.path.join(dirname(os.path.abspath(__file__)),
                                      "test_blank_orig_date.xml")
    results = {}

    def collect(id, json):
        results[id] = json

    map_output = MagicMock()
    map_output.add = collect
    mapper.map_shard(map_input, map_output)

    record = results["USA-USFDACVM-2016-US-017128"]
    eq_(None, record.get("original_receive_date"))
def test_download_fallback_cache_max_age(self, fetch_dist):
    """Downloading missing package caches result from fallback"""
    db = self.request.db = MagicMock()
    locator = self.request.locator = MagicMock()
    self.request.registry.fallback = "cache"
    self.request.fallback_simple = "https://pypi.org/simple"
    self.request.access.can_update_cache.return_value = True
    self.request.registry.package_max_age = 30
    db.fetch.return_value = None
    # Second element of the pair is the streamed body served to the client.
    fetch_dist.return_value = (MagicMock(), BytesIO(b"fds"))
    context = MagicMock()
    context.filename = "package.tar.gz"
    package_url = "https://pypi.org/simple/%s" % context.filename
    locator.get_releases.return_value = [make_dist(url=package_url)]

    response = api.download_package(context, self.request)

    fetch_dist.assert_called_once()
    self.assertEqual(response.body, b"fds")
    self.assertDictContainsSubset({"Cache-Control": "public, max-age=30"},
                                  response.headers)
def test_non_numeric_denominator(self):
    """A non-numeric dose denominator maps to '0' with unit 'Unknown'."""
    mapper = XML2JSONMapper()
    map_input = MagicMock()
    map_input.filename = os.path.join(dirname(os.path.abspath(__file__)),
                                      "test-non-numeric-denominator.xml")
    results = {}

    def collect(id, json):
        results[id] = json

    map_output = MagicMock()
    map_output.add = collect
    mapper.map_shard(map_input, map_output)

    uid = "USA-USFDACVM-2016-US-017128"
    record = results[uid]
    eq_(uid, record["unique_aer_id_number"])
    eq_(uid, record["@id"])
    eq_("0", record["drug"][0]["dose"]["denominator"])
    eq_("Unknown", record["drug"][0]["dose"]["denominator_unit"])
def test_download_fallback_cache(self, fetch_dist):
    """ Downloading missing package caches result from fallback """
    db = self.request.db = MagicMock()
    locator = self.request.locator = MagicMock()
    self.request.registry.fallback = 'cache'
    self.request.registry.fallback_url = 'http://pypi.com'
    self.request.access.can_update_cache.return_value = True
    # Not present locally, so the fallback index must be consulted.
    db.fetch.return_value = None
    fetch_dist.return_value = (MagicMock(), MagicMock())
    context = MagicMock()
    context.filename = 'package.tar.gz'
    release = MagicMock()
    package_url = 'http://pypi.com/simple/%s' % context.filename
    locator.get_project.return_value = {
        '0.1': release,
        'urls': {'0.1': set([package_url])},
    }

    response = api.download_package(context, self.request)

    fetch_dist.assert_called_with(self.request, release.name, package_url)
    self.assertEqual(response.body, fetch_dist()[1])
def test_post_new_attachment(self, mock_fields):
    """POST /attachment saves the upload and answers 201, exposing the new
    resource via the Location response header and JSON metadata."""
    req = DummyRequest(['/attachment'])
    req.method = 'POST'
    req.content = 'mocked'
    ident = 'B5B4ED80AC3B894523D72E375DACAA2FC6606C18EDF680FE95903086C8B5E14A'
    upload = MagicMock()
    upload.value = 'some mocked value'
    upload.type = 'some mocked type'
    upload.filename = 'filename.txt'
    mock_fields.return_value = {'attachment': upload}
    when(self.mail_service).save_attachment(
        'some mocked value', 'some mocked type').thenReturn(defer.succeed(ident))

    deferred = self.web.get(req)

    def check_response(_):
        self.assertEqual(201, req.code)
        self.assertEqual('/attachment/%s' % ident,
                         req.responseHeaders.getRawHeaders("location")[0])
        expected = {'ident': ident,
                    'content-type': 'some mocked type',
                    'name': 'filename.txt',
                    'size': 17,
                    'encoding': 'base64'}
        self.assertEqual(expected, json.loads(req.written[0]))
        verify(self.mail_service).save_attachment('some mocked value', 'some mocked type')

    deferred.addCallback(check_response)
    return deferred
def test_xml_to_json(self):
    """Field-by-field check of XML2JSONMapper output for the sample UDI
    record (device gs1_00844588018923) in test.xml: identifiers, publication
    and version metadata, device flags, contacts, GMDN terms, product codes,
    sizes, storage, sterilization and premarket submissions.

    NOTE(review): this block was recovered from a collapsed source line;
    string literals that spanned the wrap were re-joined — confirm them
    against the original file.
    """
    mapper = XML2JSONMapper()
    map_input = MagicMock()
    map_input.filename = os.path.join(dirname(os.path.abspath(__file__)), "test.xml")

    # Assertion callback invoked by the mapper when it emits the record.
    def add_fn(id, json):
        eq_("gs1_00844588018923", id)
        eq_("gs1_00844588018923", json["@id"])
        # identifiers: primary device id plus a package-level identifier
        eq_("00844588018923", json["identifiers"][0]["id"])
        eq_("Primary", json["identifiers"][0]["type"])
        eq_("GS1", json["identifiers"][0]["issuing_agency"])
        eq_("00844868017264", json["identifiers"][1]["id"])
        eq_("Package", json["identifiers"][1]["type"])
        eq_("GS1", json["identifiers"][1]["issuing_agency"])
        eq_("00844868017288", json["identifiers"][1]["unit_of_use_id"])
        eq_("25", json["identifiers"][1]["quantity_per_package"])
        eq_("2015-11-11", json["identifiers"][1]["package_discontinue_date"])
        eq_("Not in Commercial Distribution", json["identifiers"][1]["package_status"])
        eq_("case", json["identifiers"][1]["package_type"])
        # publication / version metadata
        eq_("Published", json["record_status"])
        eq_("2015-09-24", json["publish_date"])
        eq_("New", json["public_version_status"])
        eq_("33e1d2ec-8555-43d0-8421-b208e7185d06", json["public_device_record_key"])
        eq_("1", json["public_version_number"])
        eq_("2017-12-19", json["public_version_date"])
        eq_("123456789", json["labeler_duns_number"])
        eq_("2018-09-24", json["commercial_distribution_end_date"])
        eq_("In Commercial Distribution", json["commercial_distribution_status"])
        eq_("CS2 Acet. Cup Sys. - VitalitE", json["brand_name"])
        eq_("1107-0-3258", json["version_or_model_number"])
        eq_("1107-0-3258", json["catalog_number"])
        eq_("CONSENSUS ORTHOPEDICS, INC.", json["company_name"])
        eq_("1", json["device_count_in_base_package"])
        eq_("Acet. Insert, VitalitE", json["device_description"])
        # boolean device attributes are serialized as "true"/"false" strings
        eq_("true", json["is_direct_marking_exempt"])
        eq_("false", json["is_pm_exempt"])
        eq_("false", json["is_hct_p"])
        eq_("false", json["is_kit"])
        eq_("true", json["is_combination_product"])
        eq_("true", json["is_single_use"])
        eq_("true", json["has_lot_or_batch_number"])
        eq_("false", json["has_serial_number"])
        eq_("false", json["has_manufacturing_date"])
        eq_("true", json["has_expiration_date"])
        eq_("false", json["has_donation_id_number"])
        eq_("false", json["is_labeled_as_nrl"])
        eq_("true", json["is_labeled_as_no_nrl"])
        eq_("true", json["is_rx"])
        eq_("false", json["is_otc"])
        eq_("Labeling does not contain MRI Safety Information", json["mri_safety"])
        # customer contacts (second contact has no extension)
        eq_("+1(916)355-7100", json["customer_contacts"][0]["phone"])
        eq_("ext555", json["customer_contacts"][0]["ext"])
        eq_("*****@*****.**", json["customer_contacts"][0]["email"])
        eq_("+1 (555) 555-5555", json["customer_contacts"][1]["phone"])
        eq_(None, json["customer_contacts"][1].get("ext"))
        eq_("*****@*****.**", json["customer_contacts"][1]["email"])
        # GMDN terms (definitions keep the source's embedded newlines)
        eq_("Non-constrained polyethylene acetabular liner", json["gmdn_terms"][0]["name"])
        eq_(
            "A sterile, implantable component of a two-piece acetabulum prosthesis that is inserted\n into an acetabular shell prosthesis to provide the articulating surface with a femoral head\n prosthesis as part of a total hip arthroplasty (THA). It is made of polyethylene (includes hylamer,\n cross-linked polyethylene), and does not include a stabilizing component to limit the range of\n motion of the hip.",
            json["gmdn_terms"][0]["definition"])
        eq_("Bone-screw internal spinal fixation system, non-sterile", json["gmdn_terms"][1]["name"])
        eq_(
            "An assembly of non-sterile implantable devices intended to provide immobilization and\n stabilization of spinal segments in the treatment of various spinal instabilities or deformities,\n also used as an adjunct to spinal fusion [e.g., for degenerative disc disease (DDD)]. Otherwise\n known as a pedicle screw instrumentation system, it typically consists of a combination of anchors\n (e.g., bolts, hooks, pedicle screws or other types), interconnection mechanisms (incorporating nuts,\n screws, sleeves, or bolts), longitudinal members (e.g., plates, rods, plate/rod combinations),\n and/or transverse connectors. Non-sterile disposable devices associated with implantation may be\n included.",
            json["gmdn_terms"][1]["definition"])
        # product codes
        eq_("OQG", json["product_codes"][0]["code"])
        eq_(
            "Hip Prosthesis, semi-constrained, cemented, metal/polymer, + additive, porous,\n uncemented",
            json["product_codes"][0]["name"])
        eq_("MAX", json["product_codes"][1]["code"])
        eq_("Intervertebral fusion device with bone graft, lumbar", json["product_codes"][1]["name"])
        # device sizes (second size is free-text only; py2 unicode literal)
        eq_("Millimeter", json["device_sizes"][0]["unit"])
        eq_("32/6", json["device_sizes"][0]["value"])
        eq_("Outer Diameter", json["device_sizes"][0]["type"])
        eq_("Size test here", json["device_sizes"][0]["text"])
        eq_(None, json["device_sizes"][1].get("unit"))
        eq_(None, json["device_sizes"][1].get("value"))
        eq_("Device Size Text, specify", json["device_sizes"][1]["type"])
        eq_(unicode("SPACER LAT PEEK 8° 40L X 18W X 10H", encoding="utf-8"),
            json["device_sizes"][1]["text"])
        # storage conditions: a temperature range and a free-text condition
        eq_("Storage Environment Temperature", json["storage"][0]["type"])
        eq_("Degrees Celsius", json["storage"][0]["high"]["unit"])
        eq_("8", json["storage"][0]["high"]["value"])
        eq_("Degrees Celsius", json["storage"][0]["low"]["unit"])
        eq_("-30", json["storage"][0]["low"]["value"])
        eq_(None, json["storage"][0].get("special_conditions"))
        eq_("Special Storage Condition, Specify", json["storage"][1]["type"])
        eq_("This device must be stored in a dry location away from temperature\n extremes",
            json["storage"][1].get("special_conditions"))
        eq_(None, json["storage"][1].get("high"))
        eq_(None, json["storage"][1].get("low"))
        # sterilization
        eq_("false", json["sterilization"]["is_sterile"])
        eq_("true", json["sterilization"]["is_sterilization_prior_use"])
        eq_("Moist Heat or Steam Sterilization", json["sterilization"]["sterilization_methods"])
        # establishment and premarket submission data
        eq_(["3001451451", "12223430908"], json["fei_number"])
        eq_("K094035", json["premarket_submissions"][0]["submission_number"])
        eq_("000", json["premarket_submissions"][0]["supplement_number"])
        eq_("PMN", json["premarket_submissions"][0]["submission_type"])
        eq_("P950002", json["premarket_submissions"][1]["submission_number"])
        eq_("001", json["premarket_submissions"][1]["supplement_number"])
        eq_("PMA", json["premarket_submissions"][1]["submission_type"])

    map_output = MagicMock()
    map_output.add = add_fn
    mapper.map_shard(map_input, map_output)
def test_xml_to_json(self):
    """Field-by-field check of XML2JSONMapper output for the sample UDI
    record (device gs1_00844588018923) in test.xml. This variant of the test
    uses the "pma_submissions" key and omits the public version fields.

    NOTE(review): this block was recovered from a collapsed source line;
    string literals that spanned the wrap were re-joined — confirm them
    against the original file.
    """
    mapper = XML2JSONMapper()
    map_input = MagicMock()
    map_input.filename = os.path.join(dirname(os.path.abspath(__file__)), "test.xml")

    # Assertion callback invoked by the mapper when it emits the record.
    def add_fn(id, json):
        eq_("gs1_00844588018923", id)
        eq_("gs1_00844588018923", json["@id"])
        # identifiers: primary device id plus a package-level identifier
        eq_("00844588018923", json["identifiers"][0]["id"])
        eq_("Primary", json["identifiers"][0]["type"])
        eq_("GS1", json["identifiers"][0]["issuing_agency"])
        eq_("00844868017264", json["identifiers"][1]["id"])
        eq_("Package", json["identifiers"][1]["type"])
        eq_("GS1", json["identifiers"][1]["issuing_agency"])
        eq_("00844868017288", json["identifiers"][1]["unit_of_use_id"])
        eq_("25", json["identifiers"][1]["quantity_per_package"])
        eq_("2015-11-11", json["identifiers"][1]["package_discontinue_date"])
        eq_("Not in Commercial Distribution", json["identifiers"][1]["package_status"])
        eq_("case", json["identifiers"][1]["package_type"])
        # publication metadata
        eq_("Published", json["record_status"])
        eq_("2015-09-24", json["publish_date"])
        eq_("2018-09-24", json["commercial_distribution_end_date"])
        eq_("In Commercial Distribution", json["commercial_distribution_status"])
        eq_("CS2 Acet. Cup Sys. - VitalitE", json["brand_name"])
        eq_("1107-0-3258", json["version_or_model_number"])
        eq_("1107-0-3258", json["catalog_number"])
        eq_("CONSENSUS ORTHOPEDICS, INC.", json["company_name"])
        eq_("1", json["device_count_in_base_package"])
        eq_("Acet. Insert, VitalitE", json["device_description"])
        # boolean device attributes are serialized as "true"/"false" strings
        eq_("true", json["is_direct_marking_exempt"])
        eq_("false", json["is_pm_exempt"])
        eq_("false", json["is_hct_p"])
        eq_("false", json["is_kit"])
        eq_("true", json["is_combination_product"])
        eq_("true", json["is_single_use"])
        eq_("true", json["has_lot_or_batch_number"])
        eq_("false", json["has_serial_number"])
        eq_("false", json["has_manufacturing_date"])
        eq_("true", json["has_expiration_date"])
        eq_("false", json["has_donation_id_number"])
        eq_("false", json["is_labeled_as_nrl"])
        eq_("true", json["is_labeled_as_no_nrl"])
        eq_("true", json["is_rx"])
        eq_("false", json["is_otc"])
        eq_("Labeling does not contain MRI Safety Information", json["mri_safety"])
        # customer contacts (second contact has no extension)
        eq_("+1(916)355-7100", json["customer_contacts"][0]["phone"])
        eq_("ext555", json["customer_contacts"][0]["ext"])
        eq_("*****@*****.**", json["customer_contacts"][0]["email"])
        eq_("+1 (555) 555-5555", json["customer_contacts"][1]["phone"])
        eq_(None, json["customer_contacts"][1].get("ext"))
        eq_("*****@*****.**", json["customer_contacts"][1]["email"])
        # GMDN terms (definitions keep the source's embedded newlines)
        eq_("Non-constrained polyethylene acetabular liner", json["gmdn_terms"][0]["name"])
        eq_(
            "A sterile, implantable component of a two-piece acetabulum prosthesis that is inserted\n into an acetabular shell prosthesis to provide the articulating surface with a femoral head\n prosthesis as part of a total hip arthroplasty (THA). It is made of polyethylene (includes hylamer,\n cross-linked polyethylene), and does not include a stabilizing component to limit the range of\n motion of the hip.",
            json["gmdn_terms"][0]["definition"])
        eq_("Bone-screw internal spinal fixation system, non-sterile", json["gmdn_terms"][1]["name"])
        eq_(
            "An assembly of non-sterile implantable devices intended to provide immobilization and\n stabilization of spinal segments in the treatment of various spinal instabilities or deformities,\n also used as an adjunct to spinal fusion [e.g., for degenerative disc disease (DDD)]. Otherwise\n known as a pedicle screw instrumentation system, it typically consists of a combination of anchors\n (e.g., bolts, hooks, pedicle screws or other types), interconnection mechanisms (incorporating nuts,\n screws, sleeves, or bolts), longitudinal members (e.g., plates, rods, plate/rod combinations),\n and/or transverse connectors. Non-sterile disposable devices associated with implantation may be\n included.",
            json["gmdn_terms"][1]["definition"])
        # product codes
        eq_("OQG", json["product_codes"][0]["code"])
        eq_(
            "Hip Prosthesis, semi-constrained, cemented, metal/polymer, + additive, porous,\n uncemented",
            json["product_codes"][0]["name"])
        eq_("MAX", json["product_codes"][1]["code"])
        eq_("Intervertebral fusion device with bone graft, lumbar", json["product_codes"][1]["name"])
        # device sizes (second size is free-text only; py2 unicode literal)
        eq_("Millimeter", json["device_sizes"][0]["unit"])
        eq_("32/6", json["device_sizes"][0]["value"])
        eq_("Outer Diameter", json["device_sizes"][0]["type"])
        eq_("Size test here", json["device_sizes"][0]["text"])
        eq_(None, json["device_sizes"][1].get("unit"))
        eq_(None, json["device_sizes"][1].get("value"))
        eq_("Device Size Text, specify", json["device_sizes"][1]["type"])
        eq_(
            unicode("SPACER LAT PEEK 8° 40L X 18W X 10H", encoding="utf-8"),
            json["device_sizes"][1]["text"])
        # storage conditions: a temperature range and a free-text condition
        eq_("Storage Environment Temperature", json["storage"][0]["type"])
        eq_("Degrees Celsius", json["storage"][0]["high"]["unit"])
        eq_("8", json["storage"][0]["high"]["value"])
        eq_("Degrees Celsius", json["storage"][0]["low"]["unit"])
        eq_("-30", json["storage"][0]["low"]["value"])
        eq_(None, json["storage"][0].get("special_conditions"))
        eq_("Special Storage Condition, Specify", json["storage"][1]["type"])
        eq_(
            "This device must be stored in a dry location away from temperature\n extremes",
            json["storage"][1].get("special_conditions"))
        eq_(None, json["storage"][1].get("high"))
        eq_(None, json["storage"][1].get("low"))
        # sterilization
        eq_("false", json["sterilization"]["is_sterile"])
        eq_("true", json["sterilization"]["is_sterilization_prior_use"])
        eq_("Moist Heat or Steam Sterilization", json["sterilization"]["sterilization_methods"])
        # establishment and premarket submission data
        eq_(["3001451451", "12223430908"], json["fei_number"])
        eq_("K094035", json["pma_submissions"][0]["submission_number"])
        eq_("000", json["pma_submissions"][0]["supplement_number"])
        eq_("PMN", json["pma_submissions"][0]["submission_type"])
        eq_("P950002", json["pma_submissions"][1]["submission_number"])
        eq_("001", json["pma_submissions"][1]["supplement_number"])
        eq_("PMA", json["pma_submissions"][1]["submission_type"])

    map_output = MagicMock()
    map_output.add = add_fn
    mapper.map_shard(map_input, map_output)
def test_xml_to_json(self):
    """Field-by-field check of XML2JSONMapper output for the sample CVM
    adverse-event report USA-USFDACVM-2016-US-017128 in test.xml: receiver,
    reporters, animal data, outcome, reactions and the first drug record.
    """
    mapper = XML2JSONMapper()
    map_input = MagicMock()
    map_input.filename = os.path.join(dirname(os.path.abspath(__file__)), "test.xml")
    map_dict = {}

    # Collect every record the mapper emits, keyed by record id.
    def add(id, json):
        map_dict[id] = json

    map_output = MagicMock()
    map_output.add = add
    mapper.map_shard(map_input, map_output)

    uid = "USA-USFDACVM-2016-US-017128"
    ae = map_dict[uid]
    eq_(uid, ae["unique_aer_id_number"])
    eq_(uid, ae["@id"])
    eq_("N141203", ae["report_id"])
    eq_("20160502", ae["original_receive_date"])
    # receiver organization/address
    eq_("Food and Drug Administration Center for Veterinary Medicine",
        ae["receiver"]["organization"])
    eq_("7500 Standish Place (HFV-210) Room N403", ae["receiver"]["street_address"])
    eq_("Rockville", ae["receiver"]["city"])
    eq_("MD", ae["receiver"]["state"])
    eq_("20855", ae["receiver"]["postal_code"])
    eq_("USA", ae["receiver"]["country"])
    eq_("Other", ae["primary_reporter"])
    eq_("Animal Owner", ae["secondary_reporter"])
    eq_("Safety Issue", ae["type_of_information"])
    eq_("true", ae["serious_ae"])
    eq_("2", ae["number_of_animals_treated"])
    eq_("3", ae["number_of_animals_affected"])
    # animal description: species, age/weight ranges, breed
    eq_("Dog", ae["animal"]["species"])
    eq_("Male", ae["animal"]["gender"])
    eq_("Neutered", ae["animal"]["reproductive_status"])
    eq_("NOT APPLICABLE", ae["animal"]["female_animal_physiological_status"])
    eq_("7.00", ae["animal"]["age"]["min"])
    eq_("17.5", ae["animal"]["age"]["max"])
    eq_("Year", ae["animal"]["age"]["unit"])
    eq_("Measured", ae["animal"]["age"]["qualifier"])
    eq_("6.123", ae["animal"]["weight"]["min"])
    eq_("16.123", ae["animal"]["weight"]["max"])
    eq_("Kilogram", ae["animal"]["weight"]["unit"])
    eq_("Measured", ae["animal"]["weight"]["qualifier"])
    eq_("false", ae["animal"]["breed"]["is_crossbred"])
    eq_("Terrier - Yorkshire", ae["animal"]["breed"]["breed_component"])
    eq_("Ongoing", ae["outcome"][0]["medical_status"])
    eq_("1", ae["outcome"][0]["number_of_animals_affected"])
    eq_("Good", ae["health_assessment_prior_to_exposure"]["condition"])
    eq_("Veterinarian", ae["health_assessment_prior_to_exposure"]["assessed_by"])
    eq_("20150601", ae["onset_date"])
    eq_("14", ae["duration"]["value"])
    eq_("Month", ae["duration"]["unit"])
    # VeDDRA-coded reactions (spot-check first, second and fourteenth)
    eq_("11", ae["reaction"][0]["veddra_version"])
    eq_("2227", ae["reaction"][0]["veddra_term_code"])
    eq_("Dental disease", ae["reaction"][0]["veddra_term_name"])
    eq_("1", ae["reaction"][0]["number_of_animals_affected"])
    eq_("Actual", ae["reaction"][0]["accuracy"])
    eq_("11", ae["reaction"][1]["veddra_version"])
    eq_("1026", ae["reaction"][1]["veddra_term_code"])
    eq_("Localised pain NOS (see other 'SOCs' for specific pain)",
        ae["reaction"][1]["veddra_term_name"])
    eq_("1", ae["reaction"][1]["number_of_animals_affected"])
    eq_("Actual", ae["reaction"][1]["accuracy"])
    eq_("2", ae["reaction"][13]["veddra_version"])
    eq_("99115", ae["reaction"][13]["veddra_term_code"])
    eq_("INEFFECTIVE, HEARTWORM LARVAE", ae["reaction"][13]["veddra_term_name"])
    eq_("1", ae["reaction"][13]["number_of_animals_affected"])
    eq_("Actual", ae["reaction"][13]["accuracy"])
    eq_("10 days", ae["time_between_exposure_and_onset"])
    eq_("true", ae["treated_for_ae"])
    # drug records: spot-check the first of eight
    eq_(8, len(ae["drug"]))
    eq_("20150601", ae["drug"][0]["first_exposure_date"])
    eq_("20151201", ae["drug"][0]["last_exposure_date"])
    eq_("1", ae["drug"][0]["frequency_of_administration"]["value"])
    eq_("Day", ae["drug"][0]["frequency_of_administration"]["unit"])
    eq_("Animal Owner", ae["drug"][0]["administered_by"])
    eq_("Oral", ae["drug"][0]["route"])
    eq_("0.50", ae["drug"][0]["dose"]["numerator"])
    eq_("tablet", ae["drug"][0]["dose"]["numerator_unit"])
    eq_("1", ae["drug"][0]["dose"]["denominator"])
    eq_("Unknown", ae["drug"][0]["dose"]["denominator_unit"])
    eq_("false", ae["drug"][0]["used_according_to_label"])
    eq_([u'Route Off-Label', u'Underdosed'], ae["drug"][0]["off_label_use"])
    eq_("false", ae["drug"][0]["previous_exposure_to_drug"])
    eq_("false", ae["drug"][0]["previous_ae_to_drug"])
    eq_("false", ae["drug"][0]["ae_abated_after_stopping_drug"])
    eq_("true", ae["drug"][0]["ae_reappeared_after_resuming_drug"])
    eq_("20160101", ae["drug"][0]["manufacturing_date"])
    eq_("71423", ae["drug"][0]["lot_number"])
    eq_("20180228", ae["drug"][0]["lot_expiration"])
    eq_("1111-2222", ae["drug"][0]["product_ndc"])
    eq_("Deramaxx Chewable Tablets", ae["drug"][0]["brand_name"])
    eq_("Tablet", ae["drug"][0]["dosage_form"])
    eq_("Elanco US Inc", ae["drug"][0]["manufacturer"]["name"])
    eq_("USA-USFDACVM-N141203", ae["drug"][0]["manufacturer"]["registration_number"])
    eq_("1", ae["drug"][0]["number_of_defective_items"])
    eq_("11", ae["drug"][0]["number_of_items_returned"])
    eq_("QM01AH94", ae["drug"][0]["atc_vet_code"])
    # active ingredient of the first drug
    eq_("Deracoxib", ae["drug"][0]["active_ingredients"][0]["name"])
    eq_("25", ae["drug"][0]["active_ingredients"][0]["dose"]["numerator"])
    eq_("Milligram", ae["drug"][0]["active_ingredients"][0]["dose"]["numerator_unit"])
    eq_("1", ae["drug"][0]["active_ingredients"][0]["dose"]["denominator"])
    eq_("dose", ae["drug"][0]["active_ingredients"][0]["dose"]["denominator_unit"])
def mm(package_name):
    """Build a mock package exposing ``filename`` and ``get_url`` for
    packages_to_dict."""
    package = MagicMock()
    package.filename = package_name
    package.get_url.return_value = package_name + ".ext"
    return package