def index_device(self, device):
    """Index all new files for a given device.

    Walks every transfer directory of *device*, creates a FileModel
    record per file found, and marks each fully-processed directory
    with a ``.__indexed__`` flag file so it is skipped on later runs.

    Files whose (name, devpath, checksum) already exist in the index
    are treated as duplicates: if the existing record points at a
    different location, the newly found copy is unlinked from disk.

    :param device: device object exposing ``get_transfer_dirs()`` and
                   ``id()`` — presumably a project Device model; the
                   exact contract is not visible here.
    """
    for transfer_dir in device.get_transfer_dirs():
        abstrans = os.path.join(self._basedir, transfer_dir)
        flagfile = os.path.join(abstrans, '.__indexed__')
        # Directory was fully indexed on a previous run — skip it.
        if os.path.isfile(flagfile):
            continue
        for abspath, dirs, files in os.walk(abstrans):
            for filename in files:
                # Path relative to the transfer-dir root (keeps the
                # leading separator, matching how records are stored).
                devpath = re.sub(r'^%s' % re.escape(abstrans), '', abspath)
                absfile = os.path.join(abspath, filename)
                file_model = FileModel.factory(absfile)
                file_model.add_data({
                    'devpath': devpath,
                    'device': device.id()
                })
                try:
                    file_model.save()
                except sqlite3.IntegrityError:
                    # Unique index on "name", "devpath" and "checksum":
                    # a matching record already exists — look it up.
                    duplicates = FileModel.all().where(
                        'name = ?', file_model.name()).where(
                        'devpath = ?', file_model.devpath()).where(
                        'checksum = ?', file_model.checksum()).limit(1)
                    if (len(duplicates) > 0 and
                            duplicates[0].abspath() != file_model.abspath()):
                        # The new file is identical to an old one but is
                        # not the same file — unlink the fresh copy.
                        duplicate = os.path.join(
                            file_model.abspath(), file_model.name())
                        try:
                            os.unlink(duplicate)
                            logger.notice('Removed duplicate %s' % duplicate)
                            try:
                                os.rmdir(file_model.abspath())
                            except OSError:
                                pass  # dir is not empty
                        except OSError:
                            logger.error('Unable to remove duplicate %s'
                                         % duplicate)
                    logger.info('%s already exists, skipping..'
                                % os.path.join(devpath, filename))
        # Touch the indexed flag; a context manager guarantees the
        # handle is closed even if the write-open itself raises later.
        with open(flagfile, 'w'):
            pass
def index_device(self, device):
    """Walk a device's transfer directories and index every new file.

    Directories carrying a ``.__indexed__`` flag file are skipped;
    once a directory has been walked the flag is created so the next
    run ignores it. Files colliding with an existing index entry on
    (name, devpath, checksum) are removed from disk when the indexed
    record lives at a different location.
    """
    for tdir in device.get_transfer_dirs():
        base = os.path.join(self._basedir, tdir)
        flag = os.path.join(base, '.__indexed__')
        if os.path.isfile(flag):
            continue  # already indexed on an earlier run
        prefix = r'^%s' % re.escape(base)
        for root, _subdirs, names in os.walk(base):
            # Device-relative directory path (regex strip is pure, so
            # hoisting it out of the per-file loop is safe).
            relative = re.sub(prefix, '', root)
            for entry in names:
                model = FileModel.factory(os.path.join(root, entry))
                model.add_data({'devpath': relative, 'device': device.id()})
                try:
                    model.save()
                    continue
                except sqlite3.IntegrityError:
                    pass
                # unique index on "name", "devpath" and "checksum"
                matches = FileModel.all().where(
                    'name = ?', model.name()).where(
                    'devpath = ?', model.devpath()).where(
                    'checksum = ?', model.checksum()).limit(1)
                if (len(matches) > 0
                        and matches[0].abspath() != model.abspath()):
                    # Identical content, different location: the fresh
                    # copy is redundant — unlink it.
                    stale = os.path.join(model.abspath(), model.name())
                    try:
                        os.unlink(stale)
                        logger.notice('Removed duplicate %s' % stale)
                        try:
                            os.rmdir(model.abspath())
                        except OSError:
                            pass  # dir is not empty
                    except OSError:
                        logger.error('Unable to remove duplicate %s' % stale)
                logger.info('%s already exists, skipping..'
                            % os.path.join(relative, entry))
        open(flag, 'w').close()  # touch indexed flag
def test_log(self):
    """Test LogHelper functions.

    Checks that entries at or above the configured level are
    persisted (non-zero id, message round-trips) and that entries
    below it are suppressed until the level is lowered.
    """
    LogHelper.set_level(LogModel.WARN)
    # crit and warn sit at/above WARN, so both must create an entry.
    for helper, text in ((LogHelper.crit, 'Critical Error'),
                         (LogHelper.warn, 'A Warning')):
        record = helper(text)
        self.assertGreater(record.id(), 0)
        self.assertEqual(record.message(), text)
    # info is below WARN: it must be dropped at this level...
    self.assertIsNone(LogHelper.info('Just Information'))
    # ...and accepted once the threshold allows it.
    LogHelper.set_level(LogModel.INFO)
    self.assertIsNotNone(LogHelper.info('Just Information'))