def testGPGVerify(self):
  """Test the GPG Verify method."""
  conf = {'gpg_fingerprint': 'AAA', 'gpg': True}
  source = zsyncsource.ZSyncSource(conf)

  self.mox.StubOutWithMock(curl, 'CurlFetch')
  curl.CurlFetch('remote_sig', source.conn, source.log).AndReturn(
      (200, 'headers', 'body'))

  sig = 1
  signed = 2
  self.mox.StubOutWithMock(pyme.core, 'Data')
  pyme.core.Data('body').AndReturn(sig)
  pyme.core.Data(file='local_file').AndReturn(signed)

  result = self.mox.CreateMockAnything()
  result.signatures = [self.mox.CreateMockAnything()]
  result.signatures[0].fpr = 'AAA'

  key_mock = self.mox.CreateMockAnything()
  key_mock.uids = [self.mox.CreateMockAnything()]
  key_mock.uids[0].uid = 'Foobar'

  context = self.mox.CreateMockAnything()
  context.op_verify(sig, signed, None)
  context.op_verify_result().AndReturn(result)
  context.get_key('AAA', 0).AndReturn(key_mock)

  self.mox.ReplayAll()

  self.assertTrue(source._GPGVerify('local_file', 'remote_sig', context))
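
# A minimal, self-contained sketch of the mox record/replay pattern that
# testGPGVerify above relies on, assuming only the mox library and the
# standard time module; the stubbed function and canned return value are
# illustrative, not part of the real test fixture.
def _example_mox_record_replay():
  """Show the stub -> record -> ReplayAll -> call -> VerifyAll cycle."""
  import time

  import mox

  m = mox.Mox()
  m.StubOutWithMock(time, 'time')   # record phase: replace time.time
  time.time().AndReturn(12345.0)    # expect exactly one call, canned result
  m.ReplayAll()                     # switch from record to replay
  assert time.time() == 12345.0     # the code under test would call it here
  m.VerifyAll()                     # all recorded expectations were met
  m.UnsetStubs()                    # restore the real time.time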
def _GPGVerify(self, local_path, remote_sig, context=None):
  """Verify the file with a GPG signature.

  Args:
    local_path: Path to local file
    remote_sig: URL to signature file
    context: pyme Context object

  Returns:
    True if a signature matches the configured fingerprint, False otherwise.
  """
  if not self.conf.get('gpg_fingerprint'):
    self._ImportPubKey()

  self.log.debug('fetching: %s', remote_sig)
  self.conn.setopt(pycurl.RANGE, '0-')
  (resp_code, _, sig) = curl.CurlFetch(remote_sig, self.conn, self.log)
  if resp_code not in (200, 206):
    self.log.error('Could not fetch %s', remote_sig)
    return False

  if not context:
    context = pyme.core.Context()

  sig = pyme.core.Data(sig)
  self.log.debug('gpg verify: %s', local_path)
  signed = pyme.core.Data(file=local_path)
  context.op_verify(sig, signed, None)
  result = context.op_verify_result()

  if len(result.signatures) > 0:
    sign = result.signatures[0]
  else:
    self.log.error('No signatures in result: %s', result)
    return False

  while sign:
    if self.conf.get('gpg_fingerprint') == sign.fpr:
      self.log.info('Successfully verified file %r signed by %r',
                    local_path, context.get_key(sign.fpr, 0).uids[0].uid)
      return True
    if hasattr(sign, 'next'):
      sign = sign.next
    else:
      sign = None

  return False
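
# A minimal, standalone sketch of the detached-signature check performed by
# _GPGVerify above, assuming pyme is available and the signing key is already
# in the local keyring; the argument names and expected fingerprint are
# illustrative placeholders, not values used elsewhere in this module.
def _example_gpg_verify(signed_path, sig_bytes, expected_fpr):
  """Return True if sig_bytes signs signed_path with the expected key."""
  import pyme.core

  context = pyme.core.Context()
  sig = pyme.core.Data(sig_bytes)            # the detached signature blob
  signed = pyme.core.Data(file=signed_path)  # the data that was signed
  context.op_verify(sig, signed, None)
  result = context.op_verify_result()
  if not len(result.signatures):
    return False
  # Walk the signature chain exactly as _GPGVerify does, comparing each
  # reported fingerprint against the one we expect.
  sign = result.signatures[0]
  while sign:
    if sign.fpr == expected_fpr:
      return True
    sign = getattr(sign, 'next', None)
  return False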
def GetUpdates(self, source, url, since):
  """Get updates from a source.

  Args:
    source: A data source
    url: url to the data we want
    since: a timestamp representing the last change (None to force-get)

  Returns:
    A data map of updates with its modify timestamp set from the HTTP
    Last-Modified value, or an empty list if the source reports
    304 Not Modified.

  Raises:
    ValueError: an object in the source map is malformed
    ConfigurationError: the URL scheme is not supported
  """
  proto = url.split(':')[0]
  # Newer libcurl can restrict the allowed protocols directly; unfortunately
  # that option is not available on dapper or hardy.
  if proto not in ('http', 'https'):
    raise error.ConfigurationError('Unsupported protocol %s' % proto)

  conn = source.conn
  conn.setopt(pycurl.OPT_FILETIME, 1)
  conn.setopt(pycurl.ENCODING, 'bzip2, gzip')
  if since is not None:
    conn.setopt(pycurl.TIMEVALUE, int(since))
    conn.setopt(pycurl.TIMECONDITION, pycurl.TIMECONDITION_IFMODSINCE)

  retry_count = 0
  resp_code = 500
  while retry_count < source.conf['retry_max']:
    try:
      source.log.debug('fetching %s', url)
      (resp_code, headers, body) = curl.CurlFetch(url, conn, self.log)
      self.log.debug('response code: %s', resp_code)
    finally:
      if resp_code < 400:
        # Not modified-since
        if resp_code == 304:
          return []
        if resp_code == 200:
          break
      retry_count += 1
      self.log.warning('Failed connection: attempt #%s.', retry_count)
      if retry_count == source.conf['retry_max']:
        self.log.debug('max retries hit')
        raise error.SourceUnavailable('Max retries exceeded.')
      time.sleep(source.conf['retry_delay'])

  headers = headers.split('\r\n')
  last_modified = conn.getinfo(pycurl.INFO_FILETIME)
  self.log.debug('last modified: %s', last_modified)
  if last_modified == -1:
    for header in headers:
      if header.lower().startswith('last-modified'):
        self.log.debug('%s', header)
        http_ts_string = header[header.find(':') + 1:].strip()
        last_modified = self.FromHttpToTimestamp(http_ts_string)
        break
    else:
      http_ts_string = ''
  else:
    http_ts_string = self.FromTimestampToHttp(last_modified)

  self.log.debug('Last-modified is: %s', http_ts_string)

  # curl (on Ubuntu hardy at least) will handle gzip, but not bzip2
  try:
    response = cStringIO.StringIO(bz2.decompress(body))
    self.log.debug('bzip encoding found')
  except IOError:
    response = cStringIO.StringIO(body)

  data_map = self.GetMap(cache_info=response)
  if http_ts_string:
    http_ts = self.FromHttpToTimestamp(http_ts_string)
    self.log.debug('setting last modified to: %s', http_ts)
    data_map.SetModifyTimestamp(http_ts)

  return data_map
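
# FromHttpToTimestamp and FromTimestampToHttp are defined elsewhere on this
# class; the pair below is only a hedged sketch of the conversion they are
# expected to perform between HTTP Last-Modified strings and Unix epochs,
# using nothing but the standard library.
def _example_from_http_to_timestamp(http_ts_string):
  """Convert an RFC 1123 date such as 'Thu, 01 Jan 1970 00:00:01 GMT'."""
  import calendar
  import time

  t = time.strptime(http_ts_string, '%a, %d %b %Y %H:%M:%S GMT')
  return int(calendar.timegm(t))


def _example_from_timestamp_to_http(ts):
  """Convert a Unix epoch back into an HTTP Last-Modified string."""
  import time

  return time.strftime('%a, %d %b %Y %H:%M:%S GMT', time.gmtime(ts))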