def test_image_type(self):
    """Test image specific functionality"""
    # Save a jpg fixture and read it back through its primary key.
    path = self._get_file('jpg')
    saved_id = FileModel.factory(path).save().id()
    loaded = FileModel().load(saved_id)
    # An indexed image must report positive pixel dimensions.
    for dimension in (loaded.width(), loaded.height()):
        self.assertGreater(dimension, 0)
def test_image_type(self):
    """Test image specific functionality"""
    # Persist a jpg fixture, then reload the row by its primary key.
    filename = self._get_file("jpg")
    pk = FileModel.factory(filename).save().id()
    model = FileModel().load(pk)
    # Image files should expose positive pixel dimensions.
    self.assertGreater(model.width(), 0)
    self.assertGreater(model.height(), 0)
def test_factory(self):
    """Test file factory function"""
    # Build a model from a txt fixture and persist it.
    source = self._get_file("txt")
    new_id = FileModel.factory(source).save().id()
    self.assertIsNotNone(new_id)
    # Reload and verify the path-derived attributes.
    loaded = FileModel().load(new_id)
    expected_name = os.path.basename(source)
    expected_dir = os.path.dirname(os.path.abspath(source))
    self.assertEqual(loaded.name(), expected_name)
    self.assertEqual(loaded.extension(), "txt")
    self.assertEqual(loaded.abspath(), expected_dir)
def index_device(self, device):
    """Index all new files for a given device.

    Walks every transfer directory reported by *device*, creates a
    FileModel row for each file found and tags it with the device id and
    the path relative to the transfer dir (``devpath``).  A directory
    containing the ``.__indexed__`` flag file is skipped entirely; the
    flag is touched once a transfer dir has been walked.

    On a unique-index collision (same name, devpath and checksum as an
    existing row) the new on-disk file is treated as a duplicate: it is
    unlinked when it lives at a different absolute path than the row we
    already have, and its directory is removed if that leaves it empty.
    """
    for transfer_dir in device.get_transfer_dirs():
        abstrans = os.path.join(self._basedir, transfer_dir)
        flagfile = os.path.join(abstrans, '.__indexed__')
        if os.path.isfile(flagfile):
            continue  # already indexed on a previous run
        for abspath, dirs, files in os.walk(abstrans):
            # devpath depends only on the directory, not the file:
            # hoist it out of the per-file loop.
            devpath = re.sub(r'^%s' % re.escape(abstrans), '', abspath)
            for filename in files:
                absfile = os.path.join(abspath, filename)
                file_model = FileModel.factory(absfile)
                file_model.add_data({
                    'devpath': devpath,
                    'device': device.id()
                })
                try:
                    file_model.save()
                except sqlite3.IntegrityError:
                    # unique index on "name", "devpath" and "checksum"
                    duplicates = FileModel.all().where(
                        'name = ?', file_model.name()).where(
                        'devpath = ?', file_model.devpath()).where(
                        'checksum = ?', file_model.checksum()).limit(1)
                    if (len(duplicates) > 0 and
                            duplicates[0].abspath() != file_model.abspath()):
                        # the new file is identical to an old one but not
                        # the same file, let's unlink it.
                        duplicate = os.path.join(
                            file_model.abspath(), file_model.name())
                        try:
                            os.unlink(duplicate)
                            logger.notice('Removed duplicate %s' % duplicate)
                            try:
                                os.rmdir(file_model.abspath())
                            except OSError:
                                pass  # dir is not empty
                        except OSError:
                            logger.error(
                                'Unable to remove duplicate %s' % duplicate)
                    logger.info('%s already exists, skipping..'
                                % os.path.join(devpath, filename))
        # touch the indexed flag so this dir is skipped next time
        with open(flagfile, 'w'):
            pass
def index_device(self, device):
    """Index all new files for a given device"""
    # One pass per transfer directory the device exposes.
    transfer_dirs = device.get_transfer_dirs()
    for transfer_dir in transfer_dirs:
        abstrans = os.path.join(self._basedir, transfer_dir)
        # Presence of this flag file marks the dir as already indexed.
        flagfile = os.path.join(abstrans, '.__indexed__')
        if os.path.isfile(flagfile):
            continue
        for abspath, dirs, files in os.walk(abstrans):
            for filename in files:
                # Path of the current dir relative to the transfer dir.
                devpath = re.sub(r'^%s' % re.escape(abstrans), '', abspath)
                absfile = os.path.join(abspath, filename)
                file_model = FileModel.factory(absfile)
                file_model.add_data({
                    'devpath': devpath,
                    'device': device.id()
                })
                try:
                    file_model.save()
                except sqlite3.IntegrityError:
                    # unique index on "name", "devpath" and "checksum"
                    duplicates = FileModel.all().where(
                        'name = ?', file_model.name()).where(
                        'devpath = ?', file_model.devpath()).where(
                        'checksum = ?', file_model.checksum()).limit(1)
                    if (len(duplicates) > 0 and
                            duplicates[0].abspath() != file_model.abspath()):
                        # the new file is identical to an old one but not
                        # the same file, let's unlink it.
                        duplicate = os.path.join(file_model.abspath(),
                                                 file_model.name())
                        try:
                            os.unlink(duplicate)
                            logger.notice('Removed duplicate %s' % duplicate)
                            try:
                                os.rmdir(file_model.abspath())
                            except OSError:
                                pass  # dir is not empty
                        except OSError:
                            logger.error('Unable to remove duplicate %s'
                                         % duplicate)
                    logger.info('%s already exists, skipping..'
                                % os.path.join(devpath, filename))
        open(flagfile, 'w').close()  # touch indexed flag
def test_factory(self):
    """Test file factory function"""
    filename = self._get_file('txt')
    # factory() should build a model that can be saved immediately.
    pk = FileModel.factory(filename).save().id()
    self.assertIsNotNone(pk)
    model = FileModel().load(pk)
    # name/extension/abspath are all derived from the source path.
    self.assertEqual(model.name(), os.path.basename(filename))
    self.assertEqual(model.extension(), 'txt')
    self.assertEqual(model.abspath(),
                     os.path.dirname(os.path.abspath(filename)))