Example No. 1
  def testLockTimeout(self):
    """Test getting a lock when an old timed out one is present."""
    with gs.TemporaryURL('gslock') as lock_uri:
      # Both locks are always timed out.
      lock1 = gslock.Lock(lock_uri, lock_timeout_mins=-1)
      lock2 = gslock.Lock(lock_uri, lock_timeout_mins=-1)

      lock1.Acquire()
      lock2.Acquire()
Example No. 2
def _InProcessDoubleAcquire(lock_uri):
  """Acquire a lock in a sub-process, and reacquire it a second time.

  Do not release the lock after acquiring.

  This helper has to be pickleable, so can't be a member of the test class.

  Args:
    lock_uri: URI of the lock to acquire.

  Returns:
    int describing how many times it acquired a lock.
  """
  count = 0

  lock = gslock.Lock(lock_uri)
  try:
    lock.Acquire()
    count += 1
    lock.Acquire()
    count += 1
  except gslock.LockNotAcquired:
    pass

  return count
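
A minimal usage sketch (not from the original source) of how a test might drive this helper from a worker process via multiprocessing; the Pool setup and the sketch function name are assumptions.

import multiprocessing

def _SketchDoubleAcquireInSubprocess(lock_uri):
  """Hypothetical driver: run _InProcessDoubleAcquire in a worker process."""
  with multiprocessing.Pool(processes=1) as pool:
    # The helper is module-level (and therefore pickleable), so it can be
    # shipped to the worker process.
    count = pool.apply(_InProcessDoubleAcquire, (lock_uri,))
  # The worker never releases, so the lock file is left behind in GS.
  return count

How many acquisitions get counted depends on whether Acquire() treats re-acquiring a lock the caller already holds as a refresh or as a failure.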
Example No. 3
  def testDryrunLock(self):
    """Ensure that lock can be obtained and released in dry-run mode."""
    with gs.TemporaryURL('gslock') as lock_uri:
      lock = gslock.Lock(lock_uri, dry_run=True)
      self.assertIsNone(lock.Acquire())
      self.assertFalse(self.ctx.Exists(lock_uri))
      self.assertIsNone(lock.Release())
Example No. 4
    def testLockRepetition(self):
        """Test acquiring the same lock multiple times."""
        # Force a known host name.
        self.PatchObject(cros_build_lib,
                         'MachineDetails',
                         return_value='TestHost')

        with gs.TemporaryURL('gslock') as lock_uri:
            lock = gslock.Lock(lock_uri)

            self.assertFalse(self.ctx.Exists(lock_uri))
            lock.Acquire()
            self.assertTrue(self.ctx.Exists(lock_uri))

            lock.Acquire()
            self.assertTrue(self.ctx.Exists(lock_uri))

            lock.Release()
            self.assertFalse(self.ctx.Exists(lock_uri))

            lock.Acquire()
            self.assertTrue(self.ctx.Exists(lock_uri))

            lock.Release()
            self.assertFalse(self.ctx.Exists(lock_uri))
Example No. 5
  def testDryrunLockRepetition(self):
    """Test acquiring the same lock multiple times in dry-run mode."""
    with gs.TemporaryURL('gslock') as lock_uri:
      lock = gslock.Lock(lock_uri, dry_run=True)
      self.assertIsNone(lock.Acquire())
      self.assertIsNone(lock.Acquire())
      self.assertIsNone(lock.Release())
      self.assertIsNone(lock.Acquire())
      self.assertIsNone(lock.Release())
Example No. 6
  def testLockConflict(self):
    """Test lock conflict."""
    with gs.TemporaryURL('gslock') as lock_uri:
      lock1 = gslock.Lock(lock_uri)
      lock2 = gslock.Lock(lock_uri)

      # Manually lock 1, and ensure lock2 can't lock.
      lock1.Acquire()
      self.assertRaises(gslock.LockNotAcquired, lock2.Acquire)
      lock1.Release()

      # Use a with clause on 2, and ensure 1 can't lock.
      with lock2:
        self.assertRaises(gslock.LockNotAcquired, lock1.Acquire)

      # Ensure we can renew a given lock.
      lock1.Acquire()
      lock1.Renew()
      lock1.Release()

      # Ensure we get an error renewing a lock we don't hold.
      self.assertRaises(gslock.LockNotAcquired, lock1.Renew)
Example No. 7
def _InProcessAcquire(lock_uri):
  """Acquire a lock in a sub-process, but don't release.

  This helper has to be pickleable, so can't be a member of the test class.

  Args:
    lock_uri: URI of the lock to acquire.

  Returns:
    boolean telling if this method got the lock.
  """
  lock = gslock.Lock(lock_uri)
  try:
    lock.Acquire()
    return True
  except gslock.LockNotAcquired:
    return False
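
A minimal cross-process conflict sketch (not from the original source): hold the lock in the parent, check that a worker process cannot take it, then release it and check that a worker can. The Pool usage, sketch name, and assertions are assumptions.

import multiprocessing

def _SketchCrossProcessConflict(lock_uri):
  """Hypothetical driver for _InProcessAcquire."""
  lock = gslock.Lock(lock_uri)
  with multiprocessing.Pool(processes=1) as pool:
    lock.Acquire()
    try:
      # While the parent holds the lock, the worker should fail to acquire it.
      assert not pool.apply(_InProcessAcquire, (lock_uri,))
    finally:
      lock.Release()
    # After the release, a worker should be able to acquire it (and it keeps
    # the lock, since the helper never releases).
    assert pool.apply(_InProcessAcquire, (lock_uri,))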
Example No. 8
  def testLock(self):
    """Test getting a lock."""
    # Force a known host name.
    self.PatchObject(cros_build_lib, 'MachineDetails', return_value='TestHost')

    with gs.TemporaryURL('gslock') as lock_uri:
      lock = gslock.Lock(lock_uri)

      self.assertFalse(self.ctx.Exists(lock_uri))
      lock.Acquire()
      self.assertTrue(self.ctx.Exists(lock_uri))

      contents = self.ctx.Cat(lock_uri)
      self.assertEqual(contents, 'TestHost')

      lock.Release()
      self.assertFalse(self.ctx.Exists(lock_uri))
Example No. 9
    def _CleanSignerFilesByKeyset(self, hashes, keyset, timeout=600):
        """Helper method that cleans up GS files associated with a single keyset.

        Args:
          hashes: A list of hash values to be signed by the signer in string
                  format. They are all expected to be 32 bytes in length.
          keyset: keyset to have the hashes signed with.
          timeout: Timeout for acquiring the lock on the files to clean.

        Raises:
          gslock.LockNotAcquired if we can't get a lock on the data within timeout.
        """
        hash_names = self._CreateHashNames(len(hashes))

        instructions_uri = self._CreateInstructionsURI(keyset)
        request_uri = self._SignerRequestUri(instructions_uri)
        signature_uris = self._CreateSignatureURIs(hash_names, keyset)

        paths = [instructions_uri, request_uri]
        paths += signature_uris
        paths += [s + '.md5' for s in signature_uris]

        end_time = time.time() + timeout

        while True:
            try:
                with gslock.Lock(request_uri + '.lock'):
                    for path in paths:
                        self._ctx.Remove(path, ignore_missing=True)

                    return
            except gslock.LockNotAcquired:
                # If we have timed out.
                if time.time() > end_time:
                    raise

                time.sleep(DELAY_CHECKING_FOR_SIGNER_RESULTS_SECONDS)
Example No. 10
def _InProcessDataUpdate(lock_uri_data_uri):
  """Increment a number in a GS file protected by a lock.

  Keeps trying until the lock is acquired, so it is effectively blocking.
  Stores or increments an integer in the data_uri by one, once.

  This helper has to be pickleable, so can't be a member of the test class.

  Args:
    lock_uri_data_uri: Tuple containing (lock_uri, data_uri). Passed as
                       a tuple, since multiprocessing.Pool.map only allows
                       a single argument in.

    lock_uri: URI of the lock to acquire.
    data_uri: URI of the data file to create/increment.

  Returns:
    boolean describing if this method got the lock.
  """
  lock_uri, data_uri = lock_uri_data_uri
  ctx = gs.GSContext()

  # Keep trying until the lock is acquired.
  while True:
    try:
      with gslock.Lock(lock_uri):
        if ctx.Exists(data_uri):
          data = int(ctx.Cat(data_uri)) + 1
        else:
          data = 1

        ctx.CreateWithContents(data_uri, str(data))
        return True

    except gslock.LockNotAcquired:
      pass
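
A minimal sketch (not from the original source) of driving this helper from several processes at once; the worker count, the Pool.map usage, and the assumption that data_uri does not already exist are all illustrative.

import multiprocessing

def _SketchParallelCounter(lock_uri, data_uri, num_workers=5):
  """Hypothetical driver: bump the shared counter from several processes."""
  ctx = gs.GSContext()
  args = [(lock_uri, data_uri)] * num_workers
  with multiprocessing.Pool(processes=num_workers) as pool:
    results = pool.map(_InProcessDataUpdate, args)
  # Each map item blocks until it wins the lock and then increments exactly
  # once, so (starting from a missing data_uri) the final value should equal
  # the number of items.
  assert all(results)
  assert int(ctx.Cat(data_uri)) == num_workers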
Example No. 11
    def CreatePayloads(self):
        """Get lock on this build, and Process if we succeed.

        While holding the lock, check assorted build flags to see if we should
        process this build.

        Raises:
          BuildLocked: If the build is locked by another server or process.
        """
        lock_uri = self._GetFlagURI(gspaths.ChromeosReleases.LOCK)
        suite_name = None

        logging.info('Examining: %s', self._build)

        try:
            with gslock.Lock(lock_uri):
                logging.info('Starting: %s', self._build)

                payloads, payload_tests = self._DiscoverRequiredPayloads()

                # Find out which payloads already exist, updating the payload object's
                # URI accordingly. In doing so we're creating a list of all payload
                # objects and their skip/exist attributes. We're also recording whether
                # this run will be skipping any actual work.
                for p in payloads:
                    try:
                        result = self._FindExistingPayloads(p)
                        if result:
                            p.exists = True
                            p.uri = result[0]
                    except gs.GSNoSuchKey:
                        pass

                # Display the required payload generation list.
                log_items = []
                for p in payloads:
                    desc = str(p)
                    if p['exists']:
                        desc += ' (exists)'
                    log_items.append(desc)

                _LogList('All payloads for the build', log_items)

                # Generate new payloads.
                new_payloads = [p for p in payloads if not p['exists']]
                if new_payloads:
                    logging.info('Generating %d new payload(s)',
                                 len(new_payloads))
                    self._GeneratePayloads(new_payloads)
                    logging.info('Finished generating payloads: %s',
                                 self._build)
                else:
                    logging.info('No new payloads to generate')

                # Check that the build has a corresponding archive directory. The lab
                # can only execute control files for tests from this location.
                archive_board, archive_build, archive_build_uri = (
                    self._MapToArchive(self._build.board, self._build.version))
                self._archive_board = archive_board
                self._archive_build = archive_build
                self._archive_build_uri = archive_build_uri

                # We have a control file directory and all payloads have been
                # generated. Let's create the list of tests to conduct.
                logging.info('Uploading %d payload tests', len(payload_tests))
                suite_name = self._AutotestPayloads(payload_tests)

        except gslock.LockNotAcquired as e:
            logging.info('Build already being processed: %s', e)
            raise BuildLocked()

        except EarlyExit:
            logging.info('Nothing done: %s', self._build)
            raise

        except Exception:
            logging.error('Failed: %s', self._build)
            raise

        finally:
            self._CleanupBuild()

        return (suite_name, self._archive_board, self._archive_build,
                self._payload_test_configs)
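
A hypothetical caller sketch (not from the original source): a driver might treat BuildLocked as a signal that another process already owns this build and simply move on. The wrapper name and the logging call are assumptions.

def _SketchProcessOneBuild(paygen):
    """Hypothetical wrapper around CreatePayloads()."""
    try:
        suite_name, board, build, test_configs = paygen.CreatePayloads()
    except BuildLocked:
        # Another server or process holds the build's lock; skip it for now.
        logging.info('Build is locked elsewhere; skipping.')
        return None
    return suite_name, board, build, test_configs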