def MarkImageToBeSigned(ctx, tbs_base, insns_path, priority):
    """Flag an instructions file for signing by uploading a marker to GS.

    Uploads a small file into the GS bucket's tobesigned directory so the
    signers pick up the referenced instructions file.

    Args:
        ctx: A viable gs.GSContext.
        tbs_base: The full path to where the tobesigned directory lives.
        insns_path: The path (relative to |tbs_base|) of the file to sign.
        priority: Set the signing priority (lower == higher prio).

    Returns:
        The full path to the remote tobesigned file.

    Raises:
        ValueError: If |priority| is outside the range [0, 99].
    """
    if not 0 <= priority <= 99:
        raise ValueError('priority must be [0, 99] inclusive')

    # Accept a full path under |tbs_base| as well as a relative one.
    if insns_path.startswith(tbs_base):
        insns_path = insns_path[len(tbs_base):].lstrip('/')

    # Flatten the relative path into a single marker filename, prefixed
    # with the zero-padded priority so the signers process low numbers first.
    marker_name = insns_path.replace('/', ',')
    tbs_path = '%s/tobesigned/%02i,%s' % (tbs_base, priority, marker_name)

    # The caller will catch gs.GSContextException for us.
    ctx.Copy('-', tbs_path, input=cros_build_lib.MachineDetails())

    return tbs_path
def __init__(self, gs_path, lock_timeout_mins=120, dry_run=False, ctx=None):
    """Initializer for the lock.

    Args:
        gs_path: Path to the potential GS file we use for lock management.
        lock_timeout_mins: How long should an existing lock be considered
            valid? This timeout should be long enough that it's never hit
            unless a server is unexpectedly rebooted, lost network
            connectivity or had some other catastrophic error.
        dry_run: do nothing, always succeed
        ctx: chromite.lib.gs.GSContext to use.
    """
    # Fall back to a fresh GSContext when the caller did not supply one.
    if ctx is None:
        ctx = gs.GSContext(dry_run=dry_run)
    self._ctx = ctx
    self._gs_path = gs_path
    self._dry_run = dry_run
    # Contents written into the lock file identify this machine as holder.
    self._contents = cros_build_lib.MachineDetails()
    # GS object generation of the lock we hold; 0 means "no lock yet".
    self._generation = 0
    self._timeout = datetime.timedelta(minutes=lock_timeout_mins)
def GetHashSignatures(self, hashes, keysets=('update_signer', )):
    """Take an arbitrary list of hash files, and get them signed.

    Args:
        hashes: A list of hash values to be signed by the signer as bytes.
            They are all expected to be 32 bytes in length.
        keysets: list of keysets to have the hashes signed with. The default
            is almost certainly what you want. These names must match valid
            keysets on the signer.

    Returns:
        A list of lists of signatures as bytes in the order of the |hashes|.
        The list of signatures will correspond to the list of keysets passed
        in.

        hashes, keysets=['update_signer', 'update_signer-v2'] ->

        hashes[0]                                  hashes[1]          ...
        [ [sig_update_signer, sig_update_signer-v2], [...],             ... ]

        Returns None if the process failed.

    Raises:
        Can raise a variety of GSLibError errors in extraordinary conditions.
    """
    try:
        # Hash and signature names.
        hash_names = self._CreateHashNames(len(hashes))

        # Create and upload the archive of hashes to sign.
        with tempfile.NamedTemporaryFile() as archive_file:
            self._CreateArchive(archive_file.name, hashes, hash_names)
            self._ctx.Copy(archive_file.name, self.archive_uri)

        # [sig_uri, ...]
        all_signature_uris = []

        # { hash : [sig_uri, ...], ... }
        # Idiom fix: use a dict comprehension instead of dict() over a
        # list of pairs (flake8-comprehensions C404).
        hash_signature_uris = {h: [] for h in hashes}

        # Upload one signing instruction file and signing request for
        # each keyset.
        for keyset in keysets:
            instructions_uri = self._CreateInstructionsURI(keyset)

            self._ctx.CreateWithContents(
                instructions_uri,
                self._CreateInstructions(hash_names, keyset))

            # Create signer request file with debug friendly contents.
            self._ctx.CreateWithContents(
                self._SignerRequestUri(instructions_uri),
                cros_build_lib.MachineDetails())

            # Remember which signatures we just requested; |uris| is in the
            # same order as |hashes|, so zip pairs each hash with its uri.
            uris = self._CreateSignatureURIs(hash_names, keyset)
            all_signature_uris += uris
            for h, sig_uri in zip(hashes, uris):
                hash_signature_uris[h].append(sig_uri)

        # Wait for the signer to finish all keysets.
        if not self._WaitForSignatures(all_signature_uris):
            logging.error('Signer request timed out.')
            return None

        # Download the results.
        return [self._DownloadSignatures(hash_signature_uris[h])
                for h in hashes]

    finally:
        # Clean up the signature related files from this run.
        self._CleanSignerFiles(hashes, keysets)
def testMachineDetails(self):
    """Smoke test: MachineDetails() returns non-empty, newline-terminated text."""
    details = cros_build_lib.MachineDetails()
    self.assertNotEqual(details, '')
    self.assertEqual('\n', details[-1])