def calculate_digest(_, message):
    """Fake digest responder that ignores *message*.

    Returns an already-fired Deferred carrying random "digest" and
    "salt" bytes — nonsense values, suitable only for stubbing.
    """
    digest = factory.make_bytes()
    salt = factory.make_bytes()
    return succeed({"digest": digest, "salt": salt})
def test_arranges_for_update_on_BootSource_create(self):
    """Each BootSource create schedules `cache_boot_sources` via
    `post_commit_do`."""
    post_commit_do = self.patch(signals.bootsources, "post_commit_do")
    # Create two sources; both should arrange the deferred update.
    for _ in range(2):
        factory.make_BootSource(keyring_data=factory.make_bytes())
    self.assertThat(
        post_commit_do,
        MockCalledWith(reactor.callLater, 0, cache_boot_sources))
def test_bulk_set_user_data_with_preexisting_data(self):
    """A second bulk_set_user_data overwrites previously stored data."""
    machines = [factory.make_Node() for _ in range(2)]
    first_blob = factory.make_bytes()
    NodeUserData.objects.bulk_set_user_data(machines, first_blob)
    # Grow the node list, then overwrite everything with new data.
    machines += [factory.make_Node() for _ in range(3)]
    second_blob = factory.make_bytes()
    NodeUserData.objects.bulk_set_user_data(machines, second_blob)
    for machine in machines:
        self.assertEqual(
            second_blob, NodeUserData.objects.get_user_data(machine))
def test_arranges_for_update_always_when_empty(self): self.patch(signals.bootsources, "post_commit_do") # Create then delete a boot source cache to get over initial ignore # on create. boot_source = factory.make_BootSource( keyring_data=factory.make_bytes()) boot_source.delete() post_commit_do = self.patch(signals.bootsources, "post_commit_do") factory.make_BootSource(keyring_data=factory.make_bytes()) self.assertThat( post_commit_do, MockCalledOnceWith(reactor.callLater, 0, cache_boot_sources))
def test_store_result_allows_controllers_to_overwrite(self):
    """`store_result` may be called twice for a controller node; the
    second call must replace every field stored by the first."""
    node = factory.make_Node(node_type=random.choice([
        NODE_TYPE.REGION_AND_RACK_CONTROLLER,
        NODE_TYPE.REGION_CONTROLLER,
        NODE_TYPE.RACK_CONTROLLER,
        ]))
    script_set = factory.make_ScriptSet(node=node)
    script_result = factory.make_ScriptResult(
        script_set=script_set, status=SCRIPT_STATUS.PASSED)
    exit_status = random.randint(0, 255)
    output = factory.make_bytes()
    stdout = factory.make_bytes()
    stderr = factory.make_bytes()
    result = factory.make_bytes()
    # First store uses throwaway values; the second store below must
    # overwrite all of them.
    script_result.store_result(
        random.randint(0, 255),
        factory.make_bytes(),
        factory.make_bytes(),
        factory.make_bytes(),
        factory.make_bytes(),
        )
    script_result.store_result(exit_status, output, stdout, stderr, result)
    self.assertEqual(exit_status, script_result.exit_status)
    self.assertEqual(output, script_result.output)
    self.assertEqual(stdout, script_result.stdout)
    self.assertEqual(stderr, script_result.stderr)
    self.assertEqual(result, script_result.result)
def test_store_result_allows_pod_to_overwrite(self):
    """`store_result` may be called twice for a node that belongs to a
    pod; the second call must replace every stored field."""
    pod = factory.make_Pod()
    node = factory.make_Node()
    script_set = factory.make_ScriptSet(node=node)
    script_result = factory.make_ScriptResult(
        script_set=script_set, status=SCRIPT_STATUS.PASSED)
    # Attach the node to the pod; this is what permits overwriting.
    pod.hints.nodes.add(node)
    exit_status = random.randint(0, 255)
    output = factory.make_bytes()
    stdout = factory.make_bytes()
    stderr = factory.make_bytes()
    result = factory.make_bytes()
    # First store uses throwaway values; the second store below must
    # overwrite all of them.
    script_result.store_result(
        random.randint(0, 255),
        factory.make_bytes(),
        factory.make_bytes(),
        factory.make_bytes(),
        factory.make_bytes(),
        )
    script_result.store_result(exit_status, output, stdout, stderr, result)
    self.assertEqual(exit_status, script_result.exit_status)
    self.assertEqual(output, script_result.output)
    self.assertEqual(stdout, script_result.stdout)
    self.assertEqual(stderr, script_result.stderr)
    self.assertEqual(result, script_result.result)
def test_progress_accumulates_all_files(self):
    """A set's progress aggregates written/total bytes across files."""
    resource = factory.make_BootResource()
    resource_set = factory.make_BootResourceSet(resource)
    final_size = 0
    final_total_size = 0
    # Written sizes (512-1024) are always below total sizes
    # (1025-2048), so every file — and the set — stays incomplete.
    sizes = [random.randint(512, 1024) for _ in range(3)]
    total_sizes = [random.randint(1025, 2048) for _ in range(3)]
    types = [
        BOOT_RESOURCE_FILE_TYPE.ROOT_IMAGE,
        BOOT_RESOURCE_FILE_TYPE.BOOT_KERNEL,
        BOOT_RESOURCE_FILE_TYPE.BOOT_INITRD,
        ]
    for size in sizes:
        final_size += size
        total_size = total_sizes.pop()
        final_total_size += total_size
        filetype = types.pop()
        content = factory.make_bytes(size=size)
        largefile = factory.make_LargeFile(
            content=content, size=total_size)
        factory.make_BootResourceFile(
            resource_set, largefile, filename=filetype, filetype=filetype)
    # Expected progress: percentage of written bytes over total bytes.
    progress = 100.0 * final_size / float(final_total_size)
    self.assertAlmostEqual(progress, resource_set.progress)
def test_content(self):
    """Bytes written to a LargeFile round-trip through its content."""
    blob_size = randint(512, 1024)
    blob = factory.make_bytes(size=blob_size)
    largefile = factory.make_LargeFile(blob, size=blob_size)
    with largefile.content.open("rb") as stream:
        self.assertEqual(blob, stream.read())
def test_status_installation_failure_fails_script_result(self):
    """A curtin 'cmd-install' FAILURE event marks the running
    installation script result as FAILED."""
    # Regression test for LP:1701352
    user = factory.make_User()
    node = factory.make_Node(
        interface=True, status=NODE_STATUS.DEPLOYING, owner=user)
    node.current_installation_script_set = factory.make_ScriptSet(
        node=node, result_type=RESULT_TYPE.INSTALLATION)
    node.save()
    script_result = factory.make_ScriptResult(
        script_set=node.current_installation_script_set,
        script_name=CURTIN_INSTALL_LOG, status=SCRIPT_STATUS.RUNNING)
    content = factory.make_bytes()
    # Payload mimics curtin's report of a failed install, including
    # the base64-encoded install log file.
    payload = {
        'event_type': 'finish',
        'result': 'FAILURE',
        'origin': 'curtin',
        'name': 'cmd-install',
        'description': 'Command Install',
        'timestamp': datetime.utcnow(),
        'files': [
            {
                "path": CURTIN_INSTALL_LOG,
                "encoding": "base64",
                "content": encode_as_base64(content),
            }
        ]
    }
    self.processMessage(node, payload)
    self.assertEqual(
        SCRIPT_STATUS.FAILED, reload_object(script_result).status)
def make_empty_resource_file(self, rtype=None, content=None):
    """Create a `BootResourceFile` backed by an *empty* LargeFile.

    Returns (rfile, content): the resource file whose LargeFile has
    the right sha256/total_size but no bytes yet, and the content a
    caller is expected to upload to complete it.
    """
    # Create a largefile to use the generated content,
    # sha256, and total_size.
    if content is None:
        content = factory.make_bytes(1024)
    total_size = len(content)
    largefile = factory.make_LargeFile(content=content, size=total_size)
    sha256 = largefile.sha256
    # Re-read the content back from storage so the returned bytes
    # exactly match what the hash was computed over.
    with largefile.content.open('rb') as stream:
        content = stream.read()
    # Drop the populated largefile; its delete runs in a post-commit
    # hook.
    with post_commit_hooks:
        largefile.delete()
    # Empty largefile
    largeobject = LargeObjectFile()
    largeobject.open().close()
    largefile = LargeFile.objects.create(
        sha256=sha256, total_size=total_size, content=largeobject)
    if rtype is None:
        rtype = BOOT_RESOURCE_TYPE.UPLOADED
    resource = factory.make_BootResource(rtype=rtype)
    resource_set = factory.make_BootResourceSet(resource)
    rfile = factory.make_BootResourceFile(resource_set, largefile)
    return rfile, content
def test_PUT_returns_bad_request_when_content_doesnt_match_sha256(self):
    """Uploading bytes whose hash differs from the expected sha256
    yields HTTP 400."""
    self.become_admin()
    rfile, expected = self.make_empty_resource_file()
    # Same length, different (random) bytes — the sha256 won't match.
    mismatched = factory.make_bytes(size=len(expected))
    response = self.client.put(
        self.get_boot_resource_file_upload_uri(rfile), data=mismatched)
    self.assertEqual(
        http.client.BAD_REQUEST, response.status_code, response.content)
def test_get_or_create_file_from_content_returns_new_largefile(self):
    """get_or_create_file_from_content stores the stream's bytes and
    records their size."""
    payload = factory.make_bytes(1024)
    largefile = LargeFile.objects.get_or_create_file_from_content(
        BytesIO(payload))
    with largefile.content.open("rb") as stream:
        stored = stream.read()
    self.assertEqual(payload, stored)
    self.assertEqual(len(payload), largefile.size)
def test_with_exit_calls_close(self):
    """Leaving the `with` block closes the LargeObjectFile exactly
    once."""
    payload = factory.make_bytes()
    lobject = LargeObjectFile()
    with lobject.open("wb") as stream:
        # Real close still runs via addCleanup; the patched close lets
        # us observe the call made on context-manager exit.
        self.addCleanup(lobject.close)
        mock_close = self.patch(lobject, "close")
        stream.write(payload)
    self.assertThat(mock_close, MockCalledOnceWith())
def test_unlink(self):
    """unlink() zeroes the oid and removes the backing large object."""
    payload = factory.make_bytes()
    lobject = LargeObjectFile()
    with lobject.open("wb") as stream:
        stream.write(payload)
    saved_oid = lobject.oid
    lobject.unlink()
    self.assertEqual(0, lobject.oid)
    # The old oid must no longer be usable at the database level.
    self.assertRaises(
        OperationalError, connection.connection.lobject, saved_oid)
def test_store_result_stores_output(self):
    """store_result persists the exit status and the combined output."""
    script_result = factory.make_ScriptResult(status=SCRIPT_STATUS.RUNNING)
    exit_status = random.randint(0, 255)
    output = factory.make_bytes()
    script_result.store_result(exit_status, output=output)
    # assertEquals is a deprecated alias; use the canonical assertEqual.
    self.assertEqual(exit_status, script_result.exit_status)
    self.assertEqual(output, script_result.output)
def test_store_result_stores_stderr(self):
    """store_result persists the exit status and the stderr stream."""
    script_result = factory.make_ScriptResult(status=SCRIPT_STATUS.RUNNING)
    exit_status = random.randint(0, 255)
    stderr = factory.make_bytes()
    script_result.store_result(exit_status, stderr=stderr)
    # assertEquals is a deprecated alias; use the canonical assertEqual.
    self.assertEqual(exit_status, script_result.exit_status)
    self.assertEqual(stderr, script_result.stderr)
def test_authenticate_calculates_digest_with_salt(self):
    """The Authenticate responder HMACs the message plus a fresh salt
    using the shared secret."""
    message = factory.make_bytes()
    secret = yield get_shared_secret()
    args = {"message": message}
    response = yield call_responder(Region(), Authenticate, args)
    digest = response["digest"]
    salt = response["salt"]
    # Expected: HMAC-SHA256 over message + salt, keyed by the shared
    # secret; the salt itself must be 16 bytes long.
    expected_digest = HMAC(secret, message + salt, sha256).digest()
    self.assertEqual(expected_digest, digest)
    self.assertThat(salt, HasLength(16))
def test_doesnt_update_on_initial_BootSource_create(self):
    """The very first BootSource create (id=1) does not schedule a
    boot-source cache update."""
    # The way MAAS detects if the BootSource is the initial creation is by
    # looking at its id. Since Postgres always increments the id only the
    # initial BootSource create(default) will have id=1. When running
    # multiple tests the database may be rolled back but Postgres still
    # increments ids as normal. This resets the sequence to 1.
    with connection.cursor() as cursor:
        cursor.execute("ALTER SEQUENCE %s_id_seq RESTART WITH 1" %
                       BootSource._meta.db_table)
    post_commit_do = self.patch(signals.bootsources, "post_commit_do")
    factory.make_BootSource(keyring_data=factory.make_bytes())
    self.assertThat(post_commit_do, MockNotCalled())
def test_stores_data(self):
    """Data written through a LargeObjectField survives a save/reload
    round-trip."""
    payload = factory.make_bytes()
    instance_name = factory.make_name("name")
    model = LargeObjectFieldModel(name=instance_name)
    lobject = LargeObjectFile()
    with lobject.open("wb") as stream:
        stream.write(payload)
    model.large_object = lobject
    model.save()
    # Reload from the database and read the stored bytes back.
    reloaded = LargeObjectFieldModel.objects.get(name=instance_name)
    with reloaded.large_object.open("rb") as stream:
        self.assertEqual(payload, stream.read())
def test_store_result_stores_stdout(self):
    """store_result with only stdout leaves the other streams empty
    and records the script version."""
    script_result = factory.make_ScriptResult(status=SCRIPT_STATUS.RUNNING)
    exit_status = random.randint(0, 255)
    stdout = factory.make_bytes()
    script_result.store_result(exit_status, stdout=stdout)
    # assertEquals is a deprecated alias; use the canonical assertEqual.
    self.assertEqual(exit_status, script_result.exit_status)
    self.assertEqual(b'', script_result.output)
    self.assertEqual(stdout, script_result.stdout)
    self.assertEqual(b'', script_result.stderr)
    self.assertEqual(b'', script_result.result)
    self.assertEqual(
        script_result.script.script, script_result.script_version)
def test_boot_resource_set_to_dict(self):
    """boot_resource_set_to_dict exposes the set's fields plus a
    per-filename mapping of its files."""
    resource = factory.make_BootResource()
    resource_set = factory.make_BootResourceSet(resource)
    total_size = random.randint(1024, 2048)
    # Content is smaller than total_size, so the set is incomplete.
    content = factory.make_bytes(random.randint(512, 1023))
    largefile = factory.make_LargeFile(content=content, size=total_size)
    rfile = factory.make_BootResourceFile(resource_set, largefile)
    dict_representation = boot_resource_set_to_dict(resource_set)
    self.assertEqual(resource_set.version, dict_representation['version'])
    self.assertEqual(resource_set.label, dict_representation['label'])
    self.assertEqual(resource_set.total_size, dict_representation['size'])
    self.assertEqual(False, dict_representation['complete'])
    self.assertEqual(
        resource_set.progress, dict_representation['progress'])
    self.assertEqual(
        boot_resource_file_to_dict(rfile),
        dict_representation['files'][rfile.filename])
def test_interates_on_block_size(self):
    """Iterating a stored large object yields one chunk per block.

    NOTE(review): "interates" in the test name looks like a typo for
    "iterates"; left unchanged to keep the test's identity stable.
    """
    # String size is multiple of block_size in the testing model
    data = factory.make_bytes(10 * 2)
    test_name = factory.make_name("name")
    test_instance = LargeObjectFieldModel(name=test_name)
    large_object = LargeObjectFile()
    with large_object.open("wb") as stream:
        stream.write(data)
    test_instance.large_object = large_object
    test_instance.save()
    test_instance = LargeObjectFieldModel.objects.get(name=test_name)
    with test_instance.large_object.open("rb") as stream:
        offset = 0
        # Each iteration should produce exactly one 10-byte block.
        for block in stream:
            self.assertEqual(data[offset:offset + 10], block)
            offset += 10
def test_PUT_with_multiple_requests_and_large_content(self):
    """A large upload may be split across several sequential PUTs; the
    server assembles the chunks into the complete file."""
    prevent_scheduling_of_image_imports(self)
    self.become_admin()
    # Get large amount of data to test with
    content = factory.make_bytes(1 << 24)  # 16MB
    rfile, _ = self.make_empty_resource_file(content=content)
    # Split the 16MB payload into 4MB chunks; each PUT appends.
    split_content = [
        content[i:i + (1 << 22)]
        for i in range(0, len(content), 1 << 22)  # Loop a total of 4 times
    ]
    for send_content in split_content:
        response = self.client.put(
            self.get_boot_resource_file_upload_uri(rfile),
            data=send_content)
        self.assertEqual(
            http.client.OK, response.status_code, response.content)
    self.assertEqual(content, self.read_content(rfile))
def test_boot_resource_file_to_dict(self):
    """boot_resource_file_to_dict exposes file metadata, progress, and
    the upload URI."""
    size = random.randint(512, 1023)
    total_size = random.randint(1024, 2048)
    # Written bytes < total_size, so the file reports incomplete.
    content = factory.make_bytes(size)
    largefile = factory.make_LargeFile(content=content, size=total_size)
    resource = factory.make_BootResource(rtype=BOOT_RESOURCE_TYPE.UPLOADED)
    resource_set = factory.make_BootResourceSet(resource)
    rfile = factory.make_BootResourceFile(resource_set, largefile)
    dict_representation = boot_resource_file_to_dict(rfile)
    self.assertEqual(rfile.filename, dict_representation['filename'])
    self.assertEqual(rfile.filetype, dict_representation['filetype'])
    self.assertEqual(rfile.largefile.sha256, dict_representation['sha256'])
    self.assertEqual(total_size, dict_representation['size'])
    self.assertEqual(False, dict_representation['complete'])
    self.assertEqual(
        rfile.largefile.progress, dict_representation['progress'])
    self.assertEqual(
        reverse(
            'boot_resource_file_upload_handler',
            args=[resource.id, rfile.id]),
        dict_representation['upload_uri'])
def test_size(self):
    """A set's size sums the *written* sizes of all its files, not
    their declared total sizes."""
    resource = factory.make_BootResource()
    resource_set = factory.make_BootResourceSet(resource)
    final_size = 0
    # Written sizes (512-1024) are below total sizes (1025-2048).
    sizes = [random.randint(512, 1024) for _ in range(3)]
    total_sizes = [random.randint(1025, 2048) for _ in range(3)]
    types = [
        BOOT_RESOURCE_FILE_TYPE.ROOT_IMAGE,
        BOOT_RESOURCE_FILE_TYPE.BOOT_KERNEL,
        BOOT_RESOURCE_FILE_TYPE.BOOT_INITRD,
        ]
    for size in sizes:
        final_size += size
        filetype = types.pop()
        content = factory.make_bytes(size=size)
        largefile = factory.make_LargeFile(
            content=content, size=total_sizes.pop())
        factory.make_BootResourceFile(
            resource_set, largefile, filename=filetype, filetype=filetype)
    self.assertEqual(final_size, resource_set.size)
def test_complete_returns_false_for_one_incomplete_file(self):
    """A set is incomplete when even one of its files is incomplete."""
    resource = factory.make_BootResource()
    resource_set = factory.make_BootResourceSet(resource)
    types = [
        BOOT_RESOURCE_FILE_TYPE.ROOT_IMAGE,
        BOOT_RESOURCE_FILE_TYPE.BOOT_KERNEL,
        BOOT_RESOURCE_FILE_TYPE.BOOT_INITRD,
        ]
    # Two files with fully-written content.
    for _ in range(2):
        filetype = types.pop()
        factory.make_boot_resource_file_with_content(
            resource_set, filename=filetype, filetype=filetype)
    # One file with fewer bytes written than its declared total size.
    size = random.randint(512, 1024)
    total_size = random.randint(1025, 2048)
    filetype = types.pop()
    content = factory.make_bytes(size=size)
    largefile = factory.make_LargeFile(content=content, size=total_size)
    factory.make_BootResourceFile(
        resource_set, largefile, filename=filetype, filetype=filetype)
    self.assertFalse(resource_set.complete)
def make_BootSource():
    """Return a `BootSource` with random keyring data."""
    keyring = factory.make_bytes()
    return factory.make_BootSource(keyring_data=keyring)
def test_complete_returns_False_when_content_incomplete(self):
    """A LargeFile holding fewer bytes than its total_size is not
    complete."""
    written = randint(512, 1024)
    declared_total = randint(1025, 2048)
    partial = factory.make_bytes(size=written)
    largefile = factory.make_LargeFile(partial, size=declared_total)
    self.assertFalse(largefile.complete)
def test_progress(self):
    """progress reports total_size over the bytes actually written."""
    written = randint(512, 1024)
    declared_total = randint(1025, 2048)
    partial = factory.make_bytes(size=written)
    largefile = factory.make_LargeFile(partial, size=declared_total)
    self.assertEqual(declared_total / float(written), largefile.progress)
def test_bulk_set_user_data(self):
    """bulk_set_user_data stores the same blob for every given node."""
    machines = [factory.make_Node() for _ in range(5)]
    payload = factory.make_bytes()
    NodeUserData.objects.bulk_set_user_data(machines, payload)
    for machine in machines:
        self.assertEqual(
            payload, NodeUserData.objects.get_user_data(machine))