def verifyImage(self, runner, table):
    """Verify our image against the given AccurateRip responses.

    Verifies the cue sheet's track lengths, then compares the ripped
    tracks' AccurateRip checksums against the database entry for table.

    Needs an initialized self.result; will set accurip and friends on each
    TrackResult. Populates self.result.tracks with above TrackResults.

    :param runner: task runner used to execute the verification task
    :param table: disc table whose accuraterip_path identifies the DB entry
    :returns: True if the image verified against AccurateRip, else False
    """
    cueImage = image.Image(self.cuePath)
    # ImageVerifyTask assigns track lengths to the image as a side effect
    verifytask = image.ImageVerifyTask(cueImage)
    runner.run(verifytask)
    if verifytask.exception:
        logger.error(verifytask.exceptionMessage)
        return False

    responses = accurip.get_db_entry(table.accuraterip_path())
    # lazy %-style logging args instead of eager string interpolation
    logger.info('%d AccurateRip response(s) found', len(responses))

    # skip the hidden track (number 0); checksum the remaining track files
    checksums = accurip.calculate_checksums([
        os.path.join(os.path.dirname(self.cuePath), t.indexes[1].path)
        for t in cueImage.cue.table.tracks if t.number != 0
    ])
    if not (checksums and any(checksums['v1']) and any(checksums['v2'])):
        return False
    return accurip.verify_result(self.result, responses, checksums)
def test_uses_cache_dir(self):
    # Place the normal entry at the other entry's location in the cache.
    target = join(self.cache_dir, self.other_path)
    makedirs(dirname(target))
    copy(join(dirname(__file__), self.path[6:]), target)
    # The cache must now serve the normal entry when asked for the other path.
    self.assertEqual(self.entry, get_db_entry(self.other_path))
def verifyImage(self, runner, table):
    """Verify table against accuraterip and cue_path track lengths.

    Verify our image against the given AccurateRip responses.

    Needs an initialized self.result; will set accurip and friends on each
    TrackResult. Populates self.result.tracks with above TrackResults.

    :param runner: task runner used to execute the verification task
    :param table: disc table whose accuraterip_path identifies the DB entry
    :returns: True if the image verified against AccurateRip, else False
    """
    cueImage = image.Image(self.cuePath)
    # ImageVerifyTask assigns track lengths to the image as a side effect
    verifytask = image.ImageVerifyTask(cueImage)
    runner.run(verifytask)
    if verifytask.exception:
        logger.error(verifytask.exceptionMessage)
        return False

    responses = accurip.get_db_entry(table.accuraterip_path())
    logger.info('%d AccurateRip response(s) found', len(responses))

    # FIX: collapsed the redundant nested list comprehension
    # ("for t in [t for t in ... if ...]") into a single one.
    # Track number 0 is the hidden track; it has no AccurateRip checksum.
    checksums = accurip.calculate_checksums([
        os.path.join(os.path.dirname(self.cuePath), t.indexes[1].path)
        for t in cueImage.cue.table.tracks if t.number != 0
    ])
    if not (checksums and any(checksums['v1']) and any(checksums['v2'])):
        return False
    return accurip.verify_result(self.result, responses, checksums)
def test_uses_cache_dir(self):
    # copy normal entry into other entry's place
    makedirs(dirname(join(self.cache_dir, self.other_path)))
    copy(
        join(dirname(__file__), self.path[6:]),
        join(self.cache_dir, self.other_path)
    )
    # ask cache for other entry and assert cached entry equals normal entry.
    # FIX: assertEquals is a deprecated alias, removed in Python 3.12.
    self.assertEqual(self.entry, get_db_entry(self.other_path))
def do(self):
    """Fetch and print the AccurateRip responses for the given URL.

    Groups each track's checksums across all responses and prints them
    sorted by highest confidence.
    """
    url = self.options.url
    # FIX: str.lstrip(s) strips any leading *characters* found in s, not
    # the prefix s itself; remove the AccurateRip URL prefix explicitly.
    if url.startswith(ACCURATERIP_URL):
        url = url[len(ACCURATERIP_URL):]
    responses = get_db_entry(url)
    count = responses[0].num_tracks
    sys.stdout.write("Found %d responses for %d tracks\n\n"
                     % (len(responses), count))
    for i, r in enumerate(responses):
        if r.num_tracks != count:
            sys.stdout.write(
                "Warning: response %d has %d tracks instead of %d\n"
                % (i, r.num_tracks, count))

    # checksum and confidence by track
    for track in range(count):
        sys.stdout.write("Track %d:\n" % (track + 1))
        checksums = {}
        for i, r in enumerate(responses):
            if r.num_tracks != count:
                continue
            assert len(r.checksums) == r.num_tracks
            assert len(r.confidences) == r.num_tracks
            entry = {"confidence": r.confidences[track], "response": i + 1}
            checksum = r.checksums[track]
            # group the responses by checksum value
            checksums.setdefault(checksum, []).append(entry)

        # now sort track results in checksum by highest confidence
        sortedChecksums = []
        for checksum, entries in checksums.items():
            highest = max(d['confidence'] for d in entries)
            sortedChecksums.append((highest, checksum))
        sortedChecksums.sort()
        sortedChecksums.reverse()
        for highest, checksum in sortedChecksums:
            sys.stdout.write(" %d result(s) for checksum %s: %s\n" % (
                len(checksums[checksum]), checksum, str(checksums[checksum])))
def do(self):
    """Fetch and report the AccurateRip responses for the given URL.

    Groups each track's checksums across all responses and prints them
    sorted by highest confidence.
    """
    url = self.options.url
    # FIX: str.lstrip(s) strips any leading *characters* found in s, not
    # the prefix s itself; remove the AccurateRip URL prefix explicitly.
    if url.startswith(ACCURATERIP_URL):
        url = url[len(ACCURATERIP_URL):]
    responses = get_db_entry(url)
    count = responses[0].num_tracks
    logger.info("found %d responses for %d tracks", len(responses), count)
    for i, r in enumerate(responses):
        if r.num_tracks != count:
            logger.warning("response %d has %d tracks instead of %d",
                           i, r.num_tracks, count)

    # checksum and confidence by track
    for track in range(count):
        print("Track %d:" % (track + 1))
        checksums = {}
        for i, r in enumerate(responses):
            if r.num_tracks != count:
                continue
            assert len(r.checksums) == r.num_tracks
            assert len(r.confidences) == r.num_tracks
            entry = {"confidence": r.confidences[track], "response": i + 1}
            checksum = r.checksums[track]
            if checksum in checksums:
                checksums[checksum].append(entry)
            else:
                checksums[checksum] = [entry]

        # now sort track results in checksum by highest confidence
        sortedChecksums = []
        for checksum, entries in checksums.items():
            highest = max(d['confidence'] for d in entries)
            sortedChecksums.append((highest, checksum))
        sortedChecksums.sort()
        sortedChecksums.reverse()
        for highest, checksum in sortedChecksums:
            print(" %d result(s) for checksum %s: %s" % (
                len(checksums[checksum]), checksum, checksums[checksum]))
def do(self):
    """Fetch and report the AccurateRip responses for the given URL.

    Groups each track's checksums across all responses and prints them
    sorted by highest confidence.
    """
    url = self.options.url
    # FIX: str.lstrip(s) strips any leading *characters* found in s, not
    # the prefix s itself; remove the AccurateRip URL prefix explicitly.
    if url.startswith(ACCURATERIP_URL):
        url = url[len(ACCURATERIP_URL):]
    responses = get_db_entry(url)
    count = responses[0].num_tracks
    logger.info("found %d responses for %d tracks", len(responses), count)
    for i, r in enumerate(responses):
        if r.num_tracks != count:
            logger.warning("response %d has %d tracks instead of %d",
                           i, r.num_tracks, count)

    # checksum and confidence by track
    for track in range(count):
        print("Track %d:" % (track + 1))
        checksums = {}
        for i, r in enumerate(responses):
            if r.num_tracks != count:
                continue
            assert len(r.checksums) == r.num_tracks
            assert len(r.confidences) == r.num_tracks
            entry = {"confidence": r.confidences[track], "response": i + 1}
            checksum = r.checksums[track]
            if checksum in checksums:
                checksums[checksum].append(entry)
            else:
                checksums[checksum] = [entry]

        # now sort track results in checksum by highest confidence
        sortedChecksums = []
        for checksum, entries in checksums.items():
            highest = max(d['confidence'] for d in entries)
            sortedChecksums.append((highest, checksum))
        sortedChecksums.sort()
        sortedChecksums.reverse()
        for highest, checksum in sortedChecksums:
            print(" %d result(s) for checksum %s: %s" %
                  (len(checksums[checksum]), checksum, checksums[checksum]))
def test_AccurateRipResponse_parses_correctly(self):
    # Both responses share disc ids; confidences/checksums differ per response.
    responses = get_db_entry(self.path)
    self.assertEqual(len(responses), 2)
    expected = [
        # (confidences, checksums) for each response, per track
        ((12, 20), ('284fc705', '9cc1f32e')),
        ((7, 7), ('dc77f9ab', 'dd97d2c3')),
    ]
    for response, (confidences, checksums) in zip(responses, expected):
        self.assertEqual(response.num_tracks, 2)
        self.assertEqual(response.discId1, '0000f21c')
        self.assertEqual(response.discId2, '00027ef8')
        self.assertEqual(response.cddbDiscId, '05021002')
        for track in range(2):
            self.assertEqual(response.confidences[track], confidences[track])
            self.assertEqual(response.checksums[track], checksums[track])
def test_AccurateRipResponse_parses_correctly(self):
    # FIX: assertEquals is a deprecated alias, removed in Python 3.12;
    # use assertEqual throughout. Fixture values are unchanged.
    responses = get_db_entry(self.path)
    self.assertEqual(len(responses), 2)

    self.assertEqual(responses[0].num_tracks, 2)
    self.assertEqual(responses[0].discId1, '0000f21c')
    self.assertEqual(responses[0].discId2, '00027ef8')
    self.assertEqual(responses[0].cddbDiscId, '05021002')
    self.assertEqual(responses[0].confidences[0], 12)
    self.assertEqual(responses[0].confidences[1], 20)
    self.assertEqual(responses[0].checksums[0], '284fc705')
    self.assertEqual(responses[0].checksums[1], '9cc1f32e')

    self.assertEqual(responses[1].num_tracks, 2)
    self.assertEqual(responses[1].discId1, '0000f21c')
    self.assertEqual(responses[1].discId2, '00027ef8')
    self.assertEqual(responses[1].cddbDiscId, '05021002')
    self.assertEqual(responses[1].confidences[0], 4)
    self.assertEqual(responses[1].confidences[1], 4)
    self.assertEqual(responses[1].checksums[0], 'dc77f9ab')
    self.assertEqual(responses[1].checksums[1], 'dd97d2c3')
def test_raises_entrynotfound_for_no_entry(self):
    # Looking up a path with no database entry must raise EntryNotFound.
    self.assertRaises(EntryNotFound, get_db_entry, 'definitely_a_404')
def test_retrieves_and_saves_accuraterip_entry(self):
    # entry must not be cached yet
    self.assertFalse(exists(join(self.cache_dir, self.path)))
    # fetching retrieves the expected entry ...
    # FIX: assertEquals is a deprecated alias, removed in Python 3.12;
    # also dropped the stale commented-out loop.
    self.assertEqual(get_db_entry(self.path), self.entry)
    # ... and saves it into the cache
    self.assertTrue(exists(join(self.cache_dir, self.path)))
def test_can_return_entry_without_saving(self):
    # make the cache dir unwritable so the entry cannot be saved
    chmod(self.cache_dir, 0)
    try:
        self.assertEqual(get_db_entry(self.path), self.entry)
    finally:
        # FIX: 0755 is a SyntaxError in Python 3 — octal literals are
        # spelled 0o755. Restore permissions in a finally block so a
        # failing assertion can't leave the cache dir unreadable.
        chmod(self.cache_dir, 0o755)
    self.assertFalse(exists(join(self.cache_dir, self.path)))
def do(self):
    """Determine the drive's read offset using AccurateRip data.

    Rips the first track at each candidate offset and matches its
    AccurateRip checksums against the database responses; on a match,
    confirms by ripping the remaining tracks (except the last, to avoid
    readers that can't do overread).

    :returns: 0 when an offset is found and confirmed, None otherwise
    """
    runner = ctask.SyncRunner()
    device = self.options.device

    # if necessary, load and unmount
    logger.info('checking device %s', device)
    utils.load_device(device)
    utils.unmount_device(device)

    # first get the Table Of Contents of the CD
    t = cdrdao.ReadTOCTask(device)
    table = t.table
    logger.debug("CDDB disc id: %r", table.getCDDBDiscId())

    responses = None
    try:
        responses = accurip.get_db_entry(table.accuraterip_path())
    except accurip.EntryNotFound:
        logger.warning("AccurateRip entry not found: drive offset "
                       "can't be determined, try again with another disc")
        return

    if responses:
        logger.debug('%d AccurateRip responses found.', len(responses))
        if responses[0].cddbDiscId != table.getCDDBDiscId():
            logger.warning("AccurateRip response discid different: %s",
                           responses[0].cddbDiscId)

    # now rip the first track at various offsets, calculating AccurateRip
    # CRC, and matching it against the retrieved ones

    # archecksums is a tuple of accuraterip checksums: (v1, v2)
    def match(archecksums, track, responses):
        for i, r in enumerate(responses):
            for checksum in archecksums:
                if checksum == r.checksums[track - 1]:
                    return checksum, i
        return None, None

    for offset in self._offsets:
        logger.info('trying read offset %d...', offset)
        try:
            archecksums = self._arcs(runner, table, 1, offset)
        except task.TaskException as e:
            # let MissingDependency fall through
            if isinstance(e.exception, common.MissingDependencyException):
                raise e
            if isinstance(e.exception, cdparanoia.FileSizeError):
                logger.warning('cannot rip with offset %d...', offset)
                continue
            logger.warning("unknown task exception for offset %d: %s",
                           offset, e)
            logger.warning('cannot rip with offset %d...', offset)
            continue

        # FIX: the format string has two placeholders, so the (v1, v2)
        # tuple must be unpacked; passing it as one argument made the
        # logging call fail to format.
        logger.debug('AR checksums calculated: %s %s', *archecksums)

        c, i = match(archecksums, 1, responses)
        if c:
            count = 1
            logger.debug('matched against response %d', i)
            logger.info('offset of device is likely %d, confirming...',
                        offset)

            # now try and rip all other tracks as well, except for the
            # last one (to avoid readers that can't do overread)
            for track in range(2, (len(table.tracks) + 1) - 1):
                try:
                    archecksums = self._arcs(runner, table, track, offset)
                except task.TaskException as e:
                    if isinstance(e.exception, cdparanoia.FileSizeError):
                        logger.warning('cannot rip with offset %d...',
                                       offset)
                    # FIX: skip this track on *any* rip failure; the
                    # original fell through on unknown exceptions and
                    # matched the previous track's stale checksums.
                    continue

                c, i = match(archecksums, track, responses)
                if c:
                    logger.debug('matched track %d against response %d',
                                 track, i)
                    count += 1

            if count == len(table.tracks) - 1:
                self._foundOffset(device, offset)
                return 0
            else:
                logger.warning('only %d of %d tracks matched, '
                               'continuing...', count, len(table.tracks))

    logger.error('no matching offset found. '
                 'Consider trying again with a different disc')
def do(self): runner = ctask.SyncRunner() device = self.options.device # if necessary, load and unmount sys.stdout.write('Checking device %s\n' % device) utils.load_device(device) utils.unmount_device(device) # first get the Table Of Contents of the CD t = cdrdao.ReadTOCTask(device) table = t.table logger.debug("CDDB disc id: %r", table.getCDDBDiscId()) responses = None try: responses = accurip.get_db_entry(table.accuraterip_path()) except accurip.EntryNotFound: print('Accuraterip entry not found') if responses: logger.debug('%d AccurateRip responses found.' % len(responses)) if responses[0].cddbDiscId != table.getCDDBDiscId(): logger.warning("AccurateRip response discid different: %s", responses[0].cddbDiscId) # now rip the first track at various offsets, calculating AccurateRip # CRC, and matching it against the retrieved ones # archecksums is a tuple of accuraterip checksums: (v1, v2) def match(archecksums, track, responses): for i, r in enumerate(responses): for checksum in archecksums: if checksum == r.checksums[track - 1]: return checksum, i return None, None for offset in self._offsets: sys.stdout.write('Trying read offset %d ...\n' % offset) try: archecksums = self._arcs(runner, table, 1, offset) except task.TaskException, e: # let MissingDependency fall through if isinstance(e.exception, common.MissingDependencyException): raise e if isinstance(e.exception, cdparanoia.FileSizeError): sys.stdout.write( 'WARNING: cannot rip with offset %d...\n' % offset) continue logger.warning("Unknown task exception for offset %d: %r" % ( offset, e)) sys.stdout.write( 'WARNING: cannot rip with offset %d...\n' % offset) continue logger.debug('AR checksums calculated: %s %s' % archecksums) c, i = match(archecksums, 1, responses) if c: count = 1 logger.debug('MATCHED against response %d' % i) sys.stdout.write( 'Offset of device is likely %d, confirming ...\n' % offset) # now try and rip all other tracks as well, except for the # last one (to avoid readers that can't do 
overread for track in range(2, (len(table.tracks) + 1) - 1): try: archecksums = self._arcs(runner, table, track, offset) except task.TaskException, e: if isinstance(e.exception, cdparanoia.FileSizeError): sys.stdout.write( 'WARNING: cannot rip with offset %d...\n' % offset) continue c, i = match(archecksums, track, responses) if c: logger.debug('MATCHED track %d against response %d' % ( track, i)) count += 1 if count == len(table.tracks) - 1: self._foundOffset(device, offset) return 0 else: sys.stdout.write( 'Only %d of %d tracks matched, continuing ...\n' % ( count, len(table.tracks)))