Example #1
    def testPrepColNameForSS(self):
        tests = {
            'foo': 'foo',
            'Foo': 'foo',
            'FOO': 'foo',
            'foo bar': 'foobar',
            'Foo Bar': 'foobar',
            'F O O B A R': 'foobar',
            'Foo/Bar': 'foobar',
            'Foo Bar/Dude': 'foobardude',
            'foo/bar': 'foobar',
        }

        for col in tests:
            expected = tests[col]
            self.assertEquals(expected, gdata_lib.PrepColNameForSS(col))
            self.assertEquals(expected, gdata_lib.PrepColNameForSS(expected))
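From the expected values in the table above, PrepColNameForSS appears to lowercase a column name and strip out spaces and slashes. A minimal sketch consistent with that table (not necessarily the actual gdata_lib implementation) could be:

import re


def prep_col_name_sketch(col):
  """Sketch only: lowercase |col| and keep just letters and digits."""
  return re.sub(r'[^a-z0-9]', '', col.lower())

assert prep_col_name_sketch('Foo Bar/Dude') == 'foobardude'
assert prep_col_name_sketch('foobardude') == 'foobardude'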
Example #2
    def __init__(self, row, cols=None):
        self.custom = {}

        if not cols:
            # If columns are not specified, then column order doesn't matter.
            cols = row.keys()
        for col in cols:
            ss_col = gdata_lib.PrepColNameForSS(col)
            val = row[col]
            ss_val = gdata_lib.PrepValForSS(val)
            self.custom[ss_col] = SSEntry(ss_val)
Example #3
  def _UploadChangedRows(self):
    """Upload all rows in table that need to be changed in spreadsheet."""
    rows_unchanged, rows_updated, rows_inserted = (0, 0, 0)

    # Go over all rows in csv table.  Identify existing row by the 'Package'
    # column.  Either update existing row or create new one.
    for csv_row in self._csv_table:
      # Seed new row values from csv_row values, with column translation.
      new_row = dict((gdata_lib.PrepColNameForSS(key),
                      csv_row[key]) for key in csv_row)

      # Retrieve row values already in spreadsheet, along with row index.
      csv_package = csv_row[self.ID_COL]
      ss_row = self._GetSSRowForPackage(csv_package)

      if ss_row:
        changed = [] # Gather changes for log message.

        # Check each key/value in new_row to see if it is different from what
        # is already in spreadsheet (ss_row).  Keep only differences to get
        # the row delta.
        row_delta = {}
        for col in new_row:
          if col in ss_row:
            ss_val = ss_row[col]
            new_val = new_row[col]
            if (ss_val or new_val) and ss_val != new_val:
              changed.append('%s="%s"->"%s"' % (col, ss_val, new_val))
              row_delta[col] = new_val

        if row_delta:
          self._scomm.UpdateRowCellByCell(ss_row.ss_row_num,
                                          gdata_lib.PrepRowForSS(row_delta))
          rows_updated += 1
          oper.Info('C %-30s: %s' % (csv_package, ', '.join(changed)))
        else:
          rows_unchanged += 1
          oper.Info('S %-30s:' % csv_package)
      else:
        self._scomm.InsertRow(gdata_lib.PrepRowForSS(new_row))
        rows_inserted += 1
        row_descr_list = []
        for col in sorted(new_row.keys()):
          if col != self.ID_COL:
            row_descr_list.append('%s="%s"' % (col, new_row[col]))
        oper.Info('A %-30s: %s' % (csv_package, ', '.join(row_descr_list)))

    return (rows_unchanged, rows_updated, rows_inserted)
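The row-delta logic above keeps only the columns whose values genuinely differ, and skips the case where both sides are empty. A standalone sketch of just that comparison, using hypothetical column names and values, behaves like this:

def row_delta_sketch(ss_row, new_row):
  """Sketch of the comparison above: keep only columns whose values differ."""
  delta = {}
  for col, new_val in new_row.items():
    if col in ss_row:
      ss_val = ss_row[col]
      if (ss_val or new_val) and ss_val != new_val:
        delta[col] = new_val
  return delta

# Hypothetical rows: only 'currentversion' differs, so only that cell changes.
assert row_delta_sketch({'currentversion': '1.2.3', 'status': ''},
                        {'currentversion': '1.2.4', 'status': ''}) == {
                            'currentversion': '1.2.4'}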
Example #4
    def _CreateRowCache(self, table):
        """Recreate the expected row cache (by pkg) from |table|."""
        if not table:
            return None

        row_cache = {}
        for rowIx, row in enumerate(table):
            pkg = row[self.COL_PKG]

            # Translate column names now.
            ss_row_dict = {}
            for col in row:
                ss_row_dict[gdata_lib.PrepColNameForSS(col)] = row[col]

            ss_row = gdata_lib.SpreadsheetRow('OrigRow%d' % (rowIx + 2),
                                              rowIx + 2, ss_row_dict)
            entry = row_cache.get(pkg)
            if not entry:
                row_cache[pkg] = ss_row
            elif type(entry) == list:
                row_cache[pkg] = entry + [ss_row]
            else:
                row_cache[pkg] = [entry, ss_row]
        return row_cache
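The cache built above maps each package to a single SpreadsheetRow and quietly promotes the entry to a list when the same package occurs more than once (the duplicate case that _GetSSRowForPackage in the Uploader example below treats as an error). The accumulation pattern in isolation, with plain strings standing in for SpreadsheetRow objects, looks like this:

def accumulate_sketch(pairs):
  """Sketch: map key -> value, or key -> [values] once a key repeats."""
  cache = {}
  for key, value in pairs:
    entry = cache.get(key)
    if entry is None:
      cache[key] = value
    elif isinstance(entry, list):
      cache[key] = entry + [value]
    else:
      cache[key] = [entry, value]
  return cache

# Hypothetical packages: 'dev/foo' appears twice, so its entry becomes a list.
assert accumulate_sketch([('dev/foo', 'row2'), ('dev/bar', 'row3'),
                          ('dev/foo', 'row4')]) == {'dev/bar': 'row3',
                                                    'dev/foo': ['row2', 'row4']}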
Example #5
class SyncerTest(cros_test_lib.MoxOutputTestCase):
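    """Test the Syncer class from the sync_package_status script (sps)."""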

    col_amd64 = utable.UpgradeTable.GetColumnName(
        utable.UpgradeTable.COL_STATE, 'amd64')
    col_amd64 = gdata_lib.PrepColNameForSS(col_amd64)
    col_arm = utable.UpgradeTable.GetColumnName(utable.UpgradeTable.COL_STATE,
                                                'arm')
    col_arm = gdata_lib.PrepColNameForSS(col_arm)
    col_x86 = utable.UpgradeTable.GetColumnName(utable.UpgradeTable.COL_STATE,
                                                'x86')
    col_x86 = gdata_lib.PrepColNameForSS(col_x86)

    def testInit(self):
        mocked_syncer = self.mox.CreateMock(sps.Syncer)
        tcomm, scomm = 'TComm', 'SComm'

        # Replay script
        self.mox.ReplayAll()

        # Verify
        sps.Syncer.__init__(mocked_syncer, tcomm, scomm)
        self.mox.VerifyAll()
        self.assertEquals(scomm, mocked_syncer.scomm)
        self.assertEquals(tcomm, mocked_syncer.tcomm)
        self.assertEquals(None, mocked_syncer.teams)
        self.assertEquals(None, mocked_syncer.owners)
        self.assertEquals(False, mocked_syncer.pretend)
        self.assertEquals(False, mocked_syncer.verbose)

    def testReduceTeamName(self):
        syncer = sps.Syncer('tcomm_obj', 'scomm_obj')

        tests = {
            'build/bdavirro': 'build',
            'build/rtc': 'build',
            'build': 'build',
            'UI/zel': 'ui',
            'UI': 'ui',
            'Build': 'build',
            None: None,
        }

        # Verify
        for key in tests:
            result = syncer._ReduceTeamName(key)
            self.assertEquals(tests[key], result)

    def testReduceOwnerName(self):
        syncer = sps.Syncer('tcomm_obj', 'scomm_obj')

        tests = {
            'joe': 'joe',
            'Joe': 'joe',
            '*****@*****.**': 'joe',
            '*****@*****.**': 'joe',
            '*****@*****.**': 'joe.bob',
            None: None,
        }

        # Verify
        for key in tests:
            result = syncer._ReduceOwnerName(key)
            self.assertEquals(tests[key], result)

    def testSetTeamFilterOK(self):
        syncer = sps.Syncer('tcomm_obj', 'scomm_obj')

        tests = {
            'build:system:ui': set(['build', 'system', 'ui']),
            'Build:system:UI': set(['build', 'system', 'ui']),
            'kernel': set(['kernel']),
            'KERNEL': set(['kernel']),
            None: None,
            '': None,
        }

        # Verify
        for test in tests:
            syncer.SetTeamFilter(test)
            self.assertEquals(tests[test], syncer.teams)

    def testSetTeamFilterError(self):
        syncer = sps.Syncer('tcomm_obj', 'scomm_obj')

        # "systems" is not valid (should be "system")
        teamarg = 'build:systems'

        # Verify
        with self.OutputCapturer():
            self.assertRaises(SystemExit, sps.Syncer.SetTeamFilter, syncer,
                              teamarg)

    def testSetOwnerFilter(self):
        syncer = sps.Syncer('tcomm_obj', 'scomm_obj')

        tests = {
            'joe:bill:bob': set(['joe', 'bill', 'bob']),
            'Joe:Bill:BOB': set(['joe', 'bill', 'bob']),
            '[email protected]:bill:bob': set(['joe', 'bill', 'bob']),
            'joe': set(['joe']),
            '*****@*****.**': set(['joe']),
            '': None,
            None: None,
        }

        # Verify
        for test in tests:
            syncer.SetOwnerFilter(test)
            self.assertEquals(tests[test], syncer.owners)

    def testRowPassesFilters(self):
        mocked_syncer = self.mox.CreateMock(sps.Syncer)

        row1 = {sps.COL_TEAM: 'build', sps.COL_OWNER: 'joe'}
        row2 = {sps.COL_TEAM: 'build', sps.COL_OWNER: 'bob'}
        row3 = {sps.COL_TEAM: 'build', sps.COL_OWNER: None}
        row4 = {sps.COL_TEAM: None, sps.COL_OWNER: None}

        teams1 = set(['build'])
        teams2 = set(['kernel'])
        teams3 = set(['build', 'ui'])

        owners1 = set(['joe'])
        owners2 = set(['bob'])
        owners3 = set(['joe', 'dan'])

        tests = [
            {
                'row': row1,
                'teams': None,
                'owners': None,
                'result': True
            },
            {
                'row': row1,
                'teams': teams1,
                'owners': None,
                'result': True
            },
            {
                'row': row1,
                'teams': teams2,
                'owners': None,
                'result': False
            },
            {
                'row': row1,
                'teams': teams3,
                'owners': None,
                'result': True
            },
            {
                'row': row1,
                'teams': teams1,
                'owners': owners1,
                'result': True
            },
            {
                'row': row1,
                'teams': None,
                'owners': owners2,
                'result': False
            },
            {
                'row': row1,
                'teams': None,
                'owners': owners3,
                'result': True
            },
            {
                'row': row2,
                'teams': None,
                'owners': None,
                'result': True
            },
            {
                'row': row2,
                'teams': teams1,
                'owners': None,
                'result': True
            },
            {
                'row': row2,
                'teams': teams2,
                'owners': None,
                'result': False
            },
            {
                'row': row2,
                'teams': teams3,
                'owners': None,
                'result': True
            },
            {
                'row': row2,
                'teams': teams1,
                'owners': owners1,
                'result': False
            },
            {
                'row': row2,
                'teams': None,
                'owners': owners2,
                'result': True
            },
            {
                'row': row2,
                'teams': None,
                'owners': owners3,
                'result': False
            },
            {
                'row': row3,
                'teams': None,
                'owners': None,
                'result': True
            },
            {
                'row': row3,
                'teams': teams1,
                'owners': None,
                'result': True
            },
            {
                'row': row3,
                'teams': teams2,
                'owners': None,
                'result': False
            },
            {
                'row': row3,
                'teams': teams3,
                'owners': None,
                'result': True
            },
            {
                'row': row3,
                'teams': teams1,
                'owners': owners1,
                'result': False
            },
            {
                'row': row3,
                'teams': None,
                'owners': owners2,
                'result': False
            },
            {
                'row': row3,
                'teams': None,
                'owners': owners3,
                'result': False
            },
            {
                'row': row4,
                'teams': None,
                'owners': None,
                'result': True
            },
            {
                'row': row4,
                'teams': teams1,
                'owners': None,
                'result': False
            },
            {
                'row': row4,
                'teams': teams1,
                'owners': owners1,
                'result': False
            },
            {
                'row': row4,
                'teams': None,
                'owners': owners2,
                'result': False
            },
        ]

        # Replay script
        for test in tests:
            done = False

            if test['teams']:
                row_team = test['row'][sps.COL_TEAM]
                mocked_syncer._ReduceTeamName(row_team).AndReturn(row_team)
                done = row_team not in test['teams']

            if not done and test['owners']:
                row_owner = test['row'][sps.COL_OWNER]
                mocked_syncer._ReduceOwnerName(row_owner).AndReturn(row_owner)
        self.mox.ReplayAll()

        # Verify
        for test in tests:
            mocked_syncer.teams = test['teams']
            mocked_syncer.owners = test['owners']
            result = sps.Syncer._RowPassesFilters(mocked_syncer, test['row'])

            msg = ('Expected following row to %s filter, but it did not:\n%r' %
                   ('pass' if test['result'] else 'fail', test['row']))
            msg += '\n  Using teams filter : %r' % mocked_syncer.teams
            msg += '\n  Using owners filter: %r' % mocked_syncer.owners
            self.assertEquals(test['result'], result, msg)
        self.mox.VerifyAll()

    def testSyncMissingTrackerColumn(self):
        mocked_syncer = self.mox.CreateMock(sps.Syncer)
        mocked_scomm = self.mox.CreateMock(gdata_lib.SpreadsheetComm)
        mocked_tcomm = self.mox.CreateMock(gdata_lib.TrackerComm)
        mocked_syncer.scomm = mocked_scomm
        mocked_syncer.tcomm = mocked_tcomm

        # Replay script
        mocked_scomm.GetColumnIndex(sps.COL_TRACKER).AndReturn(None)
        self.mox.ReplayAll()

        # Verify
        self.assertRaises(sps.SyncError, sps.Syncer.Sync, mocked_syncer)
        self.mox.VerifyAll()

    def testSyncNewIssues(self):
        mocked_syncer = self.mox.CreateMock(sps.Syncer)
        mocked_scomm = self.mox.CreateMock(gdata_lib.SpreadsheetComm)
        mocked_tcomm = self.mox.CreateMock(gdata_lib.TrackerComm)
        mocked_syncer.scomm = mocked_scomm
        mocked_syncer.tcomm = mocked_tcomm

        rows = [
            {
                sps.COL_PACKAGE: 'd/f',
                sps.COL_TEAM: 'build',
                sps.COL_OWNER: None
            },
            {
                sps.COL_PACKAGE: 'd/f',
                sps.COL_TEAM: 'build',
                sps.COL_OWNER: 'joe'
            },
        ]

        # Replay script
        mocked_scomm.GetColumnIndex(sps.COL_TRACKER).AndReturn(
            1)  # Any index ok.
        mocked_scomm.GetRows().AndReturn(rows)

        for ix in xrange(len(rows)):
            mocked_syncer._RowPassesFilters(rows[ix]).AndReturn(True)
            mocked_syncer._GenIssueForRow(rows[ix]).AndReturn('NewIssue%d' %
                                                              ix)
            mocked_syncer._GetRowTrackerId(rows[ix]).AndReturn(None)
            mocked_syncer._CreateRowIssue(ix + 2, rows[ix], 'NewIssue%d' % ix)
        self.mox.ReplayAll()

        # Verify
        sps.Syncer.Sync(mocked_syncer)
        self.mox.VerifyAll()

    def testSyncClearIssues(self):
        mocked_syncer = self.mox.CreateMock(sps.Syncer)
        mocked_scomm = self.mox.CreateMock(gdata_lib.SpreadsheetComm)
        mocked_tcomm = self.mox.CreateMock(gdata_lib.TrackerComm)
        mocked_syncer.scomm = mocked_scomm
        mocked_syncer.tcomm = mocked_tcomm

        rows = [
            {
                sps.COL_PACKAGE: 'd/f',
                sps.COL_TEAM: 'build',
                sps.COL_OWNER: None
            },
            {
                sps.COL_PACKAGE: 'd/f',
                sps.COL_TEAM: 'build',
                sps.COL_OWNER: 'joe'
            },
        ]

        # Replay script
        mocked_scomm.GetColumnIndex(sps.COL_TRACKER).AndReturn(
            1)  # Any index ok.
        mocked_scomm.GetRows().AndReturn(rows)

        for ix in xrange(len(rows)):
            mocked_syncer._RowPassesFilters(rows[ix]).AndReturn(True)
            mocked_syncer._GenIssueForRow(rows[ix]).AndReturn(None)
            mocked_syncer._GetRowTrackerId(rows[ix]).AndReturn(123 + ix)
            mocked_syncer._ClearRowIssue(ix + 2, rows[ix])
        self.mox.ReplayAll()

        # Verify
        sps.Syncer.Sync(mocked_syncer)
        self.mox.VerifyAll()

    def testSyncFilteredOut(self):
        mocked_syncer = self.mox.CreateMock(sps.Syncer)
        mocked_scomm = self.mox.CreateMock(gdata_lib.SpreadsheetComm)
        mocked_tcomm = self.mox.CreateMock(gdata_lib.TrackerComm)
        mocked_syncer.scomm = mocked_scomm
        mocked_syncer.tcomm = mocked_tcomm

        rows = [
            {
                sps.COL_PACKAGE: 'd/f',
                sps.COL_TEAM: 'build',
                sps.COL_OWNER: None
            },
            {
                sps.COL_PACKAGE: 'd/f',
                sps.COL_TEAM: 'build',
                sps.COL_OWNER: 'joe'
            },
        ]

        # Replay script
        mocked_scomm.GetColumnIndex(sps.COL_TRACKER).AndReturn(
            1)  # Any index ok.
        mocked_scomm.GetRows().AndReturn(rows)

        for ix in xrange(len(rows)):
            mocked_syncer._RowPassesFilters(rows[ix]).AndReturn(False)
        self.mox.ReplayAll()

        # Verify
        sps.Syncer.Sync(mocked_syncer)
        self.mox.VerifyAll()

    def testGetRowValue(self):
        mocked_syncer = self.mox.CreateMock(sps.Syncer)

        row = {
            self.col_amd64: 'ABC',
            self.col_arm: 'XYZ',
            self.col_x86: 'FooBar',
            sps.COL_TEAM: 'build',
        }

        # Replay script
        self.mox.ReplayAll()

        # Verify
        result = sps.Syncer._GetRowValue(mocked_syncer, row, 'stateonamd64',
                                         'amd64')
        self.assertEquals('ABC', result)
        result = sps.Syncer._GetRowValue(mocked_syncer, row, 'stateonarm',
                                         'arm')
        self.assertEquals('XYZ', result)
        result = sps.Syncer._GetRowValue(mocked_syncer, row, 'stateonamd64',
                                         'amd64')
        self.assertEquals('ABC', result)
        result = sps.Syncer._GetRowValue(mocked_syncer, row, sps.COL_TEAM)
        self.assertEquals('build', result)
        self.mox.VerifyAll()

    def _TestGenIssueForRowNeedsUpgrade(self, row):
        mocked_syncer = self.mox.CreateMock(sps.Syncer)
        mocked_syncer.default_owner = None
        mocked_syncer.scomm = cros_test_lib.EasyAttr(ss_key='SomeSSKey')

        # Replay script
        for arch in sps.ARCHES:
            state = sps.Syncer._GetRowValue(mocked_syncer, row,
                                            utable.UpgradeTable.COL_STATE,
                                            arch)
            mocked_syncer._GetRowValue(row, utable.UpgradeTable.COL_STATE,
                                       arch).AndReturn(state)
        red_team = sps.Syncer._ReduceTeamName(mocked_syncer, row[sps.COL_TEAM])
        mocked_syncer._ReduceTeamName(row[sps.COL_TEAM]).AndReturn(red_team)
        red_owner = sps.Syncer._ReduceOwnerName(mocked_syncer,
                                                row[sps.COL_OWNER])
        mocked_syncer._ReduceOwnerName(row[sps.COL_OWNER]).AndReturn(red_owner)
        for arch in sps.ARCHES:
            mocked_syncer._GetRowValue(row,
                                       utable.UpgradeTable.COL_CURRENT_VER,
                                       arch).AndReturn('1')
            mocked_syncer._GetRowValue(
                row, utable.UpgradeTable.COL_STABLE_UPSTREAM_VER,
                arch).AndReturn('2')
            mocked_syncer._GetRowValue(
                row, utable.UpgradeTable.COL_LATEST_UPSTREAM_VER,
                arch).AndReturn('3')
        self.mox.ReplayAll()

        # Verify
        result = sps.Syncer._GenIssueForRow(mocked_syncer, row)
        self.mox.VerifyAll()
        return result

    def testGenIssueForRowNeedsUpgrade1(self):
        row = {
            self.col_amd64: utable.UpgradeTable.STATE_NEEDS_UPGRADE,
            self.col_arm: 'Not important',
            self.col_x86: 'Not important',
            sps.COL_TEAM: 'build',
            sps.COL_OWNER: None,
            sps.COL_PACKAGE: 'dev/foo',
        }

        result = self._TestGenIssueForRowNeedsUpgrade(row)
        self.assertEquals(None, result.owner)
        self.assertEquals(0, result.id)
        self.assertEquals('Untriaged', result.status)

    def testGenIssueForRowNeedsUpgrade2(self):
        row = {
            self.col_amd64: utable.UpgradeTable.STATE_NEEDS_UPGRADE,
            self.col_arm: utable.UpgradeTable.STATE_NEEDS_UPGRADE_AND_PATCHED,
            self.col_x86: 'Not important',
            sps.COL_TEAM: 'build',
            sps.COL_OWNER: 'joe',
            sps.COL_PACKAGE: 'dev/foo',
        }

        result = self._TestGenIssueForRowNeedsUpgrade(row)
        self.assertEquals('*****@*****.**', result.owner)
        self.assertEquals(0, result.id)
        self.assertEquals('Available', result.status)

    def testGenIssueForRowNeedsUpgrade3(self):
        mocked_syncer = self.mox.CreateMock(sps.Syncer)

        row = {
            self.col_amd64: utable.UpgradeTable.STATE_NEEDS_UPGRADE,
            self.col_arm: utable.UpgradeTable.STATE_NEEDS_UPGRADE_AND_PATCHED,
            self.col_x86: 'Not important',
            sps.COL_TEAM: None,
            sps.COL_OWNER: 'joe',
            sps.COL_PACKAGE: 'dev/foo',
        }

        # Replay script
        for arch in sps.ARCHES:
            state = sps.Syncer._GetRowValue(mocked_syncer, row,
                                            utable.UpgradeTable.COL_STATE,
                                            arch)
            mocked_syncer._GetRowValue(row, utable.UpgradeTable.COL_STATE,
                                       arch).AndReturn(state)
        reduced = sps.Syncer._ReduceTeamName(mocked_syncer, row[sps.COL_TEAM])
        mocked_syncer._ReduceTeamName(row[sps.COL_TEAM]).AndReturn(reduced)
        self.mox.ReplayAll()

        # Verify
        with self.OutputCapturer():
            self.assertRaises(RuntimeError, sps.Syncer._GenIssueForRow,
                              mocked_syncer, row)
        self.mox.VerifyAll()

    def testGenIssueForRowNoUpgrade(self):
        mocked_syncer = self.mox.CreateMock(sps.Syncer)

        row = {
            self.col_amd64: 'Not important',
            self.col_arm: 'Not important',
            self.col_x86: 'Not important',
            sps.COL_TEAM: None,
            sps.COL_OWNER: 'joe',
            sps.COL_PACKAGE: 'dev/foo',
        }

        # Replay script
        for arch in sps.ARCHES:
            state = sps.Syncer._GetRowValue(mocked_syncer, row,
                                            utable.UpgradeTable.COL_STATE,
                                            arch)
            mocked_syncer._GetRowValue(row, utable.UpgradeTable.COL_STATE,
                                       arch).AndReturn(state)
        self.mox.ReplayAll()

        # Verify
        result = sps.Syncer._GenIssueForRow(mocked_syncer, row)
        self.mox.VerifyAll()
        self.assertEquals(None, result)

    def testGetRowTrackerId(self):
        mocked_syncer = self.mox.CreateMock(sps.Syncer)

        row = {sps.COL_TRACKER: '321'}

        # Replay script
        self.mox.ReplayAll()

        # Verify
        with self.OutputCapturer():
            result = sps.Syncer._GetRowTrackerId(mocked_syncer, row)
        self.mox.VerifyAll()
        self.assertEquals(321, result)

    def testCreateRowIssuePretend(self):
        mocked_syncer = self.mox.CreateMock(sps.Syncer)
        mocked_syncer.pretend = True

        row = {sps.COL_PACKAGE: 'dev/foo'}

        # Replay script
        self.mox.ReplayAll()

        # Verify
        with self.OutputCapturer():
            sps.Syncer._CreateRowIssue(mocked_syncer, 5, row, 'some_issue')
        self.mox.VerifyAll()

    def testCreateRowIssue(self):
        mocked_syncer = self.mox.CreateMock(sps.Syncer)
        mocked_scomm = self.mox.CreateMock(gdata_lib.SpreadsheetComm)
        mocked_tcomm = self.mox.CreateMock(gdata_lib.TrackerComm)
        mocked_syncer.scomm = mocked_scomm
        mocked_syncer.tcomm = mocked_tcomm
        mocked_syncer.tracker_col_ix = 8
        mocked_syncer.pretend = False

        row_ix = 5
        row = {sps.COL_PACKAGE: 'dev/foo'}
        issue = 'SomeIssue'
        issue_id = 234
        ss_issue_val = 'Hyperlink%d' % issue_id

        # Replay script
        mocked_scomm.ClearCellValue(row_ix, mocked_syncer.tracker_col_ix)
        mocked_tcomm.CreateTrackerIssue(issue).AndReturn(issue_id)
        mocked_syncer._GenSSLinkToIssue(issue_id).AndReturn(ss_issue_val)
        mocked_scomm.ReplaceCellValue(row_ix, mocked_syncer.tracker_col_ix,
                                      ss_issue_val)
        self.mox.ReplayAll()

        # Verify
        with self.OutputCapturer():
            sps.Syncer._CreateRowIssue(mocked_syncer, row_ix, row, issue)
        self.mox.VerifyAll()

    def testGenSSLinkToIssue(self):
        mocked_syncer = self.mox.CreateMock(sps.Syncer)

        issue_id = 123

        # Replay script
        self.mox.ReplayAll()

        # Verify
        result = sps.Syncer._GenSSLinkToIssue(mocked_syncer, issue_id)
        self.mox.VerifyAll()
        self.assertEquals('=hyperlink("crosbug.com/123";"123")', result)

    def testClearRowIssue(self):
        mocked_syncer = self.mox.CreateMock(sps.Syncer)
        mocked_scomm = self.mox.CreateMock(gdata_lib.SpreadsheetComm)
        mocked_syncer.scomm = mocked_scomm
        mocked_syncer.tracker_col_ix = 8
        mocked_syncer.pretend = False

        row_ix = 44
        row = {sps.COL_PACKAGE: 'dev/foo'}

        # Replay script
        mocked_scomm.ClearCellValue(row_ix, mocked_syncer.tracker_col_ix)
        self.mox.ReplayAll()

        # Verify
        with self.OutputCapturer():
            sps.Syncer._ClearRowIssue(mocked_syncer, row_ix, row)
        self.mox.VerifyAll()

    def testClearRowIssuePretend(self):
        mocked_syncer = self.mox.CreateMock(sps.Syncer)
        mocked_scomm = self.mox.CreateMock(gdata_lib.SpreadsheetComm)
        mocked_syncer.scomm = mocked_scomm
        mocked_syncer.tracker_col_ix = 8
        mocked_syncer.pretend = True

        row_ix = 44
        row = {sps.COL_PACKAGE: 'dev/foo'}

        # Replay script
        self.mox.ReplayAll()

        # Verify
        with self.OutputCapturer():
            sps.Syncer._ClearRowIssue(mocked_syncer, row_ix, row)
        self.mox.VerifyAll()
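Every test above follows the same mox record/replay/verify cycle: expected calls are recorded on the mock, ReplayAll() switches to replay mode, the code under test runs, and VerifyAll() checks that exactly the recorded calls happened. (These tests additionally pass the mock as self to the real unbound method, so only the helper calls are faked.) A minimal self-contained illustration of that cycle, assuming the pymox package is available, is:

import mox


class Calculator(object):
  def Add(self, a, b):
    return a + b

m = mox.Mox()
mocked_calc = m.CreateMock(Calculator)
mocked_calc.Add(2, 3).AndReturn(5)  # Record the expected call.
m.ReplayAll()                       # Switch from record mode to replay mode.
assert mocked_calc.Add(2, 3) == 5   # Exercise the "code under test".
m.VerifyAll()                       # Fail if any recorded call did not happen.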
Example #6
class UploaderTest(cros_test_lib.MoxOutputTestCase):
    """Test the functionality of upload_package_status.Uploader class."""

    COL_PKG = 'Package'
    COL_SLOT = 'Slot'
    COL_OVERLAY = 'Overlay'
    COL_STATUS = 'Status'
    COL_VER = 'Current Version'
    COL_STABLE_UP = 'Stable Upstream Version'
    COL_LATEST_UP = 'Latest Upstream Version'
    COL_TARGET = 'Chrome OS Root Target'

    SS_COL_PKG = gdata_lib.PrepColNameForSS(COL_PKG)
    SS_COL_SLOT = gdata_lib.PrepColNameForSS(COL_SLOT)
    SS_COL_OVERLAY = gdata_lib.PrepColNameForSS(COL_OVERLAY)
    SS_COL_STATUS = gdata_lib.PrepColNameForSS(COL_STATUS)
    SS_COL_VER = gdata_lib.PrepColNameForSS(COL_VER)
    SS_COL_STABLE_UP = gdata_lib.PrepColNameForSS(COL_STABLE_UP)
    SS_COL_LATEST_UP = gdata_lib.PrepColNameForSS(COL_LATEST_UP)
    SS_COL_TARGET = gdata_lib.PrepColNameForSS(COL_TARGET)

    COLS = [
        COL_PKG,
        COL_SLOT,
        COL_OVERLAY,
        COL_STATUS,
        COL_VER,
        COL_STABLE_UP,
        COL_LATEST_UP,
        COL_TARGET,
    ]

    ROW0 = {
        COL_PKG: 'lib/foo',
        COL_SLOT: '0',
        COL_OVERLAY: 'portage',
        COL_STATUS: 'needs upgrade',
        COL_VER: '3.0.2',
        COL_STABLE_UP: '3.0.9',
        COL_LATEST_UP: '3.0.11',
        COL_TARGET: 'chromeos',
    }
    ROW1 = {
        COL_PKG: 'sys-dev/bar',
        COL_SLOT: '0',
        COL_OVERLAY: 'chromiumos-overlay',
        COL_STATUS: 'needs upgrade',
        COL_VER: '1.2.3-r1',
        COL_STABLE_UP: '1.2.3-r2',
        COL_LATEST_UP: '1.2.4',
        COL_TARGET: 'chromeos-dev',
    }
    ROW2 = {
        COL_PKG: 'sys-dev/raster',
        COL_SLOT: '1',
        COL_OVERLAY: 'chromiumos-overlay',
        COL_STATUS: 'current',
        COL_VER: '1.2.3',
        COL_STABLE_UP: '1.2.3',
        COL_LATEST_UP: '1.2.4',
        COL_TARGET: 'chromeos-test',
    }

    SS_ROW0 = dict([(gdata_lib.PrepColNameForSS(c), v)
                    for c, v in ROW0.items()])
    SS_ROW1 = dict([(gdata_lib.PrepColNameForSS(c), v)
                    for c, v in ROW1.items()])
    SS_ROW2 = dict([(gdata_lib.PrepColNameForSS(c), v)
                    for c, v in ROW2.items()])

    EMAIL = '*****@*****.**'
    PASSWORD = '******'

    def _MockUploader(self, table=None):
        """Set up a mocked Uploader object."""
        uploader = self.mox.CreateMock(ups.Uploader)

        if not table:
            # Use default table
            table = self._CreateDefaultTable()

        for slot in ups.Uploader.__slots__:
            uploader.__setattr__(slot, None)

        uploader._csv_table = table
        uploader._scomm = self.mox.CreateMock(gdata_lib.SpreadsheetComm)
        uploader._creds = cros_test_lib.EasyAttr(user=self.EMAIL,
                                                 password=self.PASSWORD)
        uploader._ss_row_cache = self._CreateRowCache(table)

        return uploader

    def _CreateRowCache(self, table):
        """Recreate the expected row cache (by pkg) from |table|."""
        if not table:
            return None

        row_cache = {}
        for rowIx, row in enumerate(table):
            pkg = row[self.COL_PKG]

            # Translate column names now.
            ss_row_dict = {}
            for col in row:
                ss_row_dict[gdata_lib.PrepColNameForSS(col)] = row[col]

            ss_row = gdata_lib.SpreadsheetRow('OrigRow%d' % (rowIx + 2),
                                              rowIx + 2, ss_row_dict)
            entry = row_cache.get(pkg)
            if not entry:
                row_cache[pkg] = ss_row
            elif type(entry) == list:
                row_cache[pkg] = entry + [ss_row]
            else:
                row_cache[pkg] = [entry, ss_row]
        return row_cache

    def _CreateDefaultTable(self):
        return self._CreateTableWithRows(self.COLS, [self.ROW0, self.ROW1])

    def _CreateTableWithRows(self, cols, rows):
        mytable = tablelib.Table(list(cols))
        if rows:
            for row in rows:
                mytable.AppendRow(dict(row))
        return mytable

    def testLoadTable(self):
        # Note: this test is not actually for a method of the Uploader class.

        self.mox.StubOutWithMock(tablelib.Table, 'LoadFromCSV')
        csv = 'any.csv'

        # Replay script
        tablelib.Table.LoadFromCSV(csv).AndReturn('loaded_table')
        self.mox.ReplayAll()

        # Verification steps.
        with self.OutputCapturer():
            loaded_table = ups.LoadTable(csv)
            self.assertEquals(loaded_table, 'loaded_table')

    def testGetSSRowForPackage(self):
        mocked_uploader = self._MockUploader()

        # No replay script.
        self.mox.ReplayAll()

        # Verification steps.
        result = ups.Uploader._GetSSRowForPackage(mocked_uploader,
                                                  self.ROW0[self.COL_PKG])
        self.assertEquals(result, self.SS_ROW0)
        self.assertEquals(2, result.ss_row_num)
        result = ups.Uploader._GetSSRowForPackage(mocked_uploader,
                                                  self.ROW1[self.COL_PKG])
        self.assertEquals(result, self.SS_ROW1)
        self.assertEquals(3, result.ss_row_num)
        result = ups.Uploader._GetSSRowForPackage(mocked_uploader,
                                                  self.ROW2[self.COL_PKG])
        self.assertEquals(result, None)

        self.mox.VerifyAll()

    def testUploadFirstWorksheet(self):
        mocked_uploader = self._MockUploader()

        # Clear ._scomm attribute to simulate uploading first worksheet.
        mocked_scomm = mocked_uploader._scomm
        mocked_uploader._scomm = None

        self.mox.StubOutWithMock(gdata_lib.SpreadsheetComm, '__new__')

        ss_key = 'Some ss_key'
        ws_name = 'Some ws_name'

        # Replay script
        gdata_lib.SpreadsheetComm.__new__(
            gdata_lib.SpreadsheetComm).AndReturn(mocked_scomm)
        mocked_scomm.Connect(mocked_uploader._creds,
                             ss_key,
                             ws_name,
                             source='Upload Package Status')
        mocked_scomm.GetRowCacheByCol(self.SS_COL_PKG).AndReturn('RowCache')
        mocked_uploader._UploadChangedRows().AndReturn(tuple([0, 1, 2]))
        mocked_uploader._DeleteOldRows().AndReturn(tuple([3, 4]))
        self.mox.ReplayAll()

        # Verify
        with self.OutputCapturer():
            ups.Uploader.Upload(mocked_uploader, ss_key, ws_name)
            self.mox.VerifyAll()

    def testUploadSecondWorksheet(self):
        mocked_uploader = self._MockUploader()

        ss_key = 'Some ss_key'
        ws_name = 'Some ws_name'

        # Replay script
        mocked_uploader._scomm.SetCurrentWorksheet(ws_name)
        mocked_uploader._scomm.GetRowCacheByCol(
            self.SS_COL_PKG).AndReturn('RCache')
        mocked_uploader._UploadChangedRows().AndReturn(tuple([0, 1, 2]))
        mocked_uploader._DeleteOldRows().AndReturn(tuple([3, 4]))
        self.mox.ReplayAll()

        # Verify
        with self.OutputCapturer():
            ups.Uploader.Upload(mocked_uploader, ss_key, ws_name)
            self.mox.VerifyAll()

    def testUploadChangedRows(self):
        table = self._CreateTableWithRows(self.COLS,
                                          [self.ROW0, self.ROW1, self.ROW2])
        mocked_uploader = self._MockUploader(table=table)

        def RowVerifier(row_delta, golden_col_set, golden_row):
            if golden_col_set != set(row_delta.keys()):
                return False

            for col in row_delta:
                val = row_delta[col]
                if val != golden_row[col]:
                    return False

            return True

        # First Row.
        # Pretend first row does not exist already in online spreadsheet
        # by returning None from _GetSSRowForPackage.
        #
        row0_pkg = self.ROW0[self.COL_PKG]
        mocked_uploader._GetSSRowForPackage(row0_pkg).AndReturn(None)
        mocked_uploader._scomm.InsertRow(mox.IgnoreArg())

        # Second Row.
        # Pretend second row does already exist in online spreadsheet, and
        # pretend that it has a different value that needs to be changed
        # by an upload.
        row1_pkg = self.ROW1[self.COL_PKG]
        row1_reverse_delta = {self.SS_COL_VER: '1.2.3'}
        ss_row1 = dict(self.SS_ROW1)
        for col in row1_reverse_delta:
            ss_row1[col] = row1_reverse_delta[col]
        ss_row1 = gdata_lib.SpreadsheetRow('OrigRow1', 3, ss_row1)
        mocked_uploader._GetSSRowForPackage(row1_pkg).AndReturn(ss_row1)
        # Prepare verification for row.
        g_col_set1 = set(row1_reverse_delta.keys())
        g_row1 = gdata_lib.PrepRowForSS(self.SS_ROW1)
        row1_verifier = lambda rdelta: RowVerifier(rdelta, g_col_set1, g_row1)
        mocked_uploader._scomm.UpdateRowCellByCell(3, mox.Func(row1_verifier))

        # Third Row.
        # Pretend third row does already exist in online spreadsheet, and
        # pretend that several values need to be changed by an upload.
        row2_pkg = self.ROW2[self.COL_PKG]
        row2_reverse_delta = {
            self.SS_COL_STATUS: 'needs upgrade',
            self.SS_COL_VER: '0.5',
            self.SS_COL_TARGET: 'chromeos-foo',
        }
        ss_row2 = dict(self.SS_ROW2)
        for col in row2_reverse_delta:
            ss_row2[col] = row2_reverse_delta[col]
        ss_row2 = gdata_lib.SpreadsheetRow('OrigRow2', 4, ss_row2)
        mocked_uploader._GetSSRowForPackage(row2_pkg).AndReturn(ss_row2)
        # Prepare verification for row.
        g_col_set2 = set(row2_reverse_delta.keys())
        g_row2 = gdata_lib.PrepRowForSS(self.SS_ROW2)
        row2_verifier = lambda rdelta: RowVerifier(rdelta, g_col_set2, g_row2)
        mocked_uploader._scomm.UpdateRowCellByCell(4, mox.Func(row2_verifier))

        self.mox.ReplayAll()

        # Verify
        with self.OutputCapturer():
            ups.Uploader._UploadChangedRows(mocked_uploader)
        self.mox.VerifyAll()

    def testDeleteOldRows(self):
        mocked_uploader = self._MockUploader()

        # Pretend spreadsheet has 2 rows, one in table and one not.
        ss_row1 = gdata_lib.SpreadsheetRow('OrigRow1', 2, self.SS_ROW1)
        ss_row2 = gdata_lib.SpreadsheetRow('OrigRow2', 3, self.SS_ROW2)
        ss_rows = (ss_row1, ss_row2)

        mocked_uploader._scomm.GetRows().AndReturn(ss_rows)
        # We expect ROW2 in spreadsheet to be deleted.
        mocked_uploader._scomm.DeleteRow('OrigRow2')
        self.mox.ReplayAll()

        # Verify
        with self.OutputCapturer():
            ups.Uploader._DeleteOldRows(mocked_uploader)
        self.mox.VerifyAll()
Example #7
class Uploader(object):
  """Uploads portage package status data from csv file to Google spreadsheet."""

  __slots__ = ('_creds',          # gdata_lib.Creds object
               '_scomm',          # gdata_lib.SpreadsheetComm object
               '_ss_row_cache',   # dict with key=pkg, val=SpreadsheetRow obj
               '_csv_table',      # table.Table of csv rows
               )

  ID_COL = utable.UpgradeTable.COL_PACKAGE
  SS_ID_COL = gdata_lib.PrepColNameForSS(ID_COL)
  SOURCE = 'Uploaded from CSV'

  def __init__(self, creds, table_obj):
    self._creds = creds
    self._csv_table = table_obj
    self._scomm = None
    self._ss_row_cache = None

  def _GetSSRowForPackage(self, package):
    """Return the SpreadsheetRow corresponding to Package=|package|."""
    if package in self._ss_row_cache:
      row = self._ss_row_cache[package]

      if isinstance(row, list):
        raise LookupError('More than one row in spreadsheet with Package=%s' %
                          package)

      return row

    return None

  def Upload(self, ss_key, ws_name):
    """Upload |_csv_table| to the given Google Spreadsheet.

    The spreadsheet is identified by the spreadsheet key |ss_key|.
    The worksheet within that spreadsheet is identified by the
    worksheet name |ws_name|.
    """
    if self._scomm:
      self._scomm.SetCurrentWorksheet(ws_name)
    else:
      self._scomm = gdata_lib.SpreadsheetComm()
      self._scomm.Connect(self._creds, ss_key, ws_name,
                          source='Upload Package Status')

    oper.Notice('Caching rows for worksheet %r.' % self._scomm.ws_name)
    self._ss_row_cache = self._scomm.GetRowCacheByCol(self.SS_ID_COL)

    oper.Notice('Uploading changes to worksheet "%s" of spreadsheet "%s" now.' %
                (self._scomm.ws_name, self._scomm.ss_key))

    oper.Info('Details by package: S=Same, C=Changed, A=Added, D=Deleted')
    rows_unchanged, rows_updated, rows_inserted = self._UploadChangedRows()
    rows_deleted, rows_with_owner_deleted = self._DeleteOldRows()

    oper.Notice('Final row stats for worksheet "%s"'
                ': %d changed, %d added, %d deleted, %d same.' %
                (self._scomm.ws_name, rows_updated, rows_inserted,
                 rows_deleted, rows_unchanged))
    if rows_with_owner_deleted:
      oper.Warning('%d rows with owner entry deleted, see above warnings.' %
                   rows_with_owner_deleted)
    else:
      oper.Notice('No rows with owner entry were deleted.')

  def _UploadChangedRows(self):
    """Upload all rows in table that need to be changed in spreadsheet."""
    rows_unchanged, rows_updated, rows_inserted = (0, 0, 0)

    # Go over all rows in csv table.  Identify existing row by the 'Package'
    # column.  Either update existing row or create new one.
    for csv_row in self._csv_table:
      # Seed new row values from csv_row values, with column translation.
      new_row = dict((gdata_lib.PrepColNameForSS(key),
                      csv_row[key]) for key in csv_row)

      # Retrieve row values already in spreadsheet, along with row index.
      csv_package = csv_row[self.ID_COL]
      ss_row = self._GetSSRowForPackage(csv_package)

      if ss_row:
        changed = [] # Gather changes for log message.

        # Check each key/value in new_row to see if it is different from what
        # is already in spreadsheet (ss_row).  Keep only differences to get
        # the row delta.
        row_delta = {}
        for col in new_row:
          if col in ss_row:
            ss_val = ss_row[col]
            new_val = new_row[col]
            if (ss_val or new_val) and ss_val != new_val:
              changed.append('%s="%s"->"%s"' % (col, ss_val, new_val))
              row_delta[col] = new_val

        if row_delta:
          self._scomm.UpdateRowCellByCell(ss_row.ss_row_num,
                                          gdata_lib.PrepRowForSS(row_delta))
          rows_updated += 1
          oper.Info('C %-30s: %s' % (csv_package, ', '.join(changed)))
        else:
          rows_unchanged += 1
          oper.Info('S %-30s:' % csv_package)
      else:
        self._scomm.InsertRow(gdata_lib.PrepRowForSS(new_row))
        rows_inserted += 1
        row_descr_list = []
        for col in sorted(new_row.keys()):
          if col != self.ID_COL:
            row_descr_list.append('%s="%s"' % (col, new_row[col]))
        oper.Info('A %-30s: %s' % (csv_package, ', '.join(row_descr_list)))

    return (rows_unchanged, rows_updated, rows_inserted)

  def _DeleteOldRows(self):
    """Delete all rows from the spreadsheet that are not found in the table."""
    oper.Notice('Checking for rows in worksheet that should be deleted now.')

    rows_deleted, rows_with_owner_deleted = (0, 0)

    # Also need to delete rows in spreadsheet that are not in csv table.
    ss_rows = self._scomm.GetRows()
    for ss_row in ss_rows:
      ss_package = gdata_lib.ScrubValFromSS(ss_row[self.SS_ID_COL])

      # See whether this row is in csv table.
      csv_rows = self._csv_table.GetRowsByValue({ self.ID_COL: ss_package })
      if not csv_rows:
        # Row needs to be deleted from spreadsheet.
        owner_val = None
        owner_notes_val = None
        row_descr_list = []
        for col in sorted(ss_row.keys()):
          if col == 'owner':
            owner_val = ss_row[col]
          if col == 'ownernotes':
            owner_notes_val = ss_row[col]

          # Don't include the ID_COL value; it is already in the prefix.
          if col != self.SS_ID_COL:
            val = ss_row[col]
            row_descr_list.append('%s="%s"' % (col, val))

        oper.Info('D %-30s: %s' % (ss_package, ', '.join(row_descr_list)))
        if owner_val or owner_notes_val:
          rows_with_owner_deleted += 1
          oper.Notice('WARNING: Deleting spreadsheet row with owner entry:\n' +
                      '  %-30s: Owner=%s, Owner Notes=%s' %
                      (ss_package, owner_val, owner_notes_val))

        self._scomm.DeleteRow(ss_row.ss_row_obj)
        rows_deleted += 1

    return (rows_deleted, rows_with_owner_deleted)
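For context, a hedged sketch of how this class appears to be driven, using the LoadTable helper exercised in the tests above and a hypothetical csv path, spreadsheet key, and worksheet name (the real command-line entry point of upload_package_status is not shown in this section):

# Illustration only: |creds| is assumed to be an already-initialized
# gdata_lib.Creds object; its construction is not shown in this section.
csv_table = LoadTable('package_status.csv')
uploader = Uploader(creds, csv_table)
uploader.Upload('some_ss_key', ws_name='Packages')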
Example #8
  def _GetRowValue(self, row, colName, arch=None):
    """Get value from |row| at |colName|, adjusted for |arch|."""
    if arch:
      colName = utable.UpgradeTable.GetColumnName(colName, arch=arch)
    colName = gdata_lib.PrepColNameForSS(colName)
    return row[colName]
Example #9
from chromite.lib import operation
from chromite.lib import upgrade_table as utable
from chromite.scripts import upload_package_status as ups

# pylint: disable=W0201,R0904

PROJECT_NAME = 'chromium-os'

PKGS_WS_NAME = 'Packages'

CROS_ORG = 'chromium.org'
CHROMIUMOS_SITE = 'http://www.%s/%s' % (CROS_ORG, PROJECT_NAME)
PKG_UPGRADE_PAGE = '%s/gentoo-package-upgrade-process' % CHROMIUMOS_SITE
DOCS_SITE = 'https://docs.google.com/a'

COL_PACKAGE = gdata_lib.PrepColNameForSS(utable.UpgradeTable.COL_PACKAGE)
COL_TEAM = gdata_lib.PrepColNameForSS('Team/Lead')
COL_OWNER = gdata_lib.PrepColNameForSS('Owner')
COL_TRACKER = gdata_lib.PrepColNameForSS('Tracker')

ARCHES = ('amd64', 'arm', 'x86')

oper = operation.Operation('sync_package_status')


def _GetPkgSpreadsheetURL(ss_key):
  return '%s/%s/spreadsheet/ccc?key=%s' % (DOCS_SITE, CROS_ORG, ss_key)
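With the constants above, this helper produces a URL of the following shape (the key here is made up):

# Hypothetical spreadsheet key, for illustration only.
print(_GetPkgSpreadsheetURL('abc123'))
# -> https://docs.google.com/a/chromium.org/spreadsheet/ccc?key=abc123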


class SyncError(RuntimeError):
  """Extend RuntimeError for use in this module."""